summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorKlaus Aehlig <klaus.aehlig@huawei.com>2022-02-22 17:03:21 +0100
committerKlaus Aehlig <klaus.aehlig@huawei.com>2022-02-22 17:03:21 +0100
commit619def44c1cca9f3cdf63544d5f24f2c7a7d9b77 (patch)
tree01868de723cb82c86842f33743fa7b14e24c1fa3
downloadjustbuild-619def44c1cca9f3cdf63544d5f24f2c7a7d9b77.tar.gz
Initial self-hosting commit
This is the initial version of our tool that is able to build itself. In can be bootstrapped by ./bin/bootstrap.py Co-authored-by: Oliver Reiche <oliver.reiche@huawei.com> Co-authored-by: Victor Moreno <victor.moreno1@huawei.com>
-rw-r--r--ROOT0
-rw-r--r--TARGETS26
-rwxr-xr-xbin/bootstrap-traverser.py137
-rwxr-xr-xbin/bootstrap.py185
-rwxr-xr-xbin/just-mr.py523
-rw-r--r--etc/defaults/CC/TARGETS16
-rw-r--r--etc/defaults/CC/TARGETS.boringssl70
-rw-r--r--etc/defaults/CC/TARGETS.nowerror12
-rw-r--r--etc/defaults/CC/TARGETS.protobuf19
-rw-r--r--etc/defaults/CC/test/TARGETS1
-rw-r--r--etc/import/TARGETS.bazel_remote_apis19
-rw-r--r--etc/import/TARGETS.boringssl682
-rw-r--r--etc/import/TARGETS.cares92
-rw-r--r--etc/import/TARGETS.catch27
-rw-r--r--etc/import/TARGETS.cli115
-rw-r--r--etc/import/TARGETS.fmt12
-rw-r--r--etc/import/TARGETS.git226
-rw-r--r--etc/import/TARGETS.google_apis45
-rw-r--r--etc/import/TARGETS.grpc1601
-rw-r--r--etc/import/TARGETS.gsl8
-rw-r--r--etc/import/TARGETS.json45
-rw-r--r--etc/import/TARGETS.protobuf37
-rw-r--r--etc/import/TARGETS.re255
-rw-r--r--etc/import/TARGETS.upb32
-rw-r--r--etc/import/TARGETS.zlib32
-rw-r--r--etc/import/absl/algorithm/TARGETS.absl8
-rw-r--r--etc/import/absl/base/TARGETS.absl148
-rw-r--r--etc/import/absl/container/TARGETS.absl34
-rw-r--r--etc/import/absl/memory/TARGETS.absl8
-rw-r--r--etc/import/absl/meta/TARGETS.absl8
-rw-r--r--etc/import/absl/numeric/TARGETS.absl13
-rw-r--r--etc/import/absl/strings/TARGETS.absl115
-rw-r--r--etc/import/absl/time/TARGETS.absl30
-rw-r--r--etc/import/absl/time/internal/cctz/TARGETS.absl42
-rw-r--r--etc/import/absl/types/TARGETS.absl39
-rw-r--r--etc/import/absl/utility/TARGETS.absl12
-rw-r--r--etc/import/deps/http-parser/TARGETS.git28
-rw-r--r--etc/import/deps/pcre/TARGETS.git2248
-rw-r--r--etc/import/include/CLI/TARGETS.cli1122
-rw-r--r--etc/import/include/TARGETS.git2325
-rw-r--r--etc/import/include/fmt/TARGETS.fmt19
-rw-r--r--etc/import/include/grpc++/TARGETS.grpc102
-rw-r--r--etc/import/include/grpc/TARGETS.grpc105
-rw-r--r--etc/import/include/grpcpp/TARGETS.grpc168
-rw-r--r--etc/import/libgit2.org47
-rw-r--r--etc/import/src/TARGETS.git2779
-rw-r--r--etc/import/src/compiler/TARGETS.grpc51
-rw-r--r--etc/import/src/core/ext/upb-generated/TARGETS.grpc137
-rw-r--r--etc/import/src/google/protobuf/TARGETS.protobuf451
-rw-r--r--etc/import/src/include/openssl/TARGETS.boringssl91
-rw-r--r--etc/import/third_party/TARGETS.grpc115
-rw-r--r--etc/import/third_party/address_sorting/include/address_sorting/TARGETS.grpc7
-rw-r--r--etc/repos.json248
-rw-r--r--rules/CC/EXPRESSIONS504
-rw-r--r--rules/CC/RULES538
-rw-r--r--rules/CC/proto/EXPRESSIONS301
-rw-r--r--rules/CC/proto/RULES72
-rw-r--r--rules/CC/test/RULES265
-rw-r--r--rules/CC/test/test_runner.sh36
-rw-r--r--rules/EXPRESSIONS89
-rw-r--r--rules/data/RULES42
-rw-r--r--rules/proto/RULES105
-rw-r--r--rules/transitions/EXPRESSIONS13
-rw-r--r--src/buildtool/TARGETS1
-rw-r--r--src/buildtool/build_engine/analysed_target/TARGETS12
-rw-r--r--src/buildtool/build_engine/analysed_target/analysed_target.hpp101
-rw-r--r--src/buildtool/build_engine/base_maps/TARGETS162
-rw-r--r--src/buildtool/build_engine/base_maps/directory_map.cpp35
-rw-r--r--src/buildtool/build_engine/base_maps/directory_map.hpp22
-rw-r--r--src/buildtool/build_engine/base_maps/entity_name.hpp206
-rw-r--r--src/buildtool/build_engine/base_maps/entity_name_data.hpp129
-rw-r--r--src/buildtool/build_engine/base_maps/expression_function.hpp103
-rw-r--r--src/buildtool/build_engine/base_maps/expression_map.cpp90
-rw-r--r--src/buildtool/build_engine/base_maps/expression_map.hpp32
-rw-r--r--src/buildtool/build_engine/base_maps/field_reader.hpp231
-rw-r--r--src/buildtool/build_engine/base_maps/json_file_map.hpp93
-rw-r--r--src/buildtool/build_engine/base_maps/module_name.hpp36
-rw-r--r--src/buildtool/build_engine/base_maps/rule_map.cpp371
-rw-r--r--src/buildtool/build_engine/base_maps/rule_map.hpp33
-rw-r--r--src/buildtool/build_engine/base_maps/source_map.cpp88
-rw-r--r--src/buildtool/build_engine/base_maps/source_map.hpp24
-rw-r--r--src/buildtool/build_engine/base_maps/targets_file_map.hpp23
-rw-r--r--src/buildtool/build_engine/base_maps/user_rule.hpp404
-rw-r--r--src/buildtool/build_engine/expression/TARGETS46
-rw-r--r--src/buildtool/build_engine/expression/configuration.hpp154
-rw-r--r--src/buildtool/build_engine/expression/evaluator.cpp936
-rw-r--r--src/buildtool/build_engine/expression/evaluator.hpp76
-rw-r--r--src/buildtool/build_engine/expression/expression.cpp249
-rw-r--r--src/buildtool/build_engine/expression/expression.hpp380
-rw-r--r--src/buildtool/build_engine/expression/expression_ptr.cpp89
-rw-r--r--src/buildtool/build_engine/expression/expression_ptr.hpp95
-rw-r--r--src/buildtool/build_engine/expression/function_map.hpp23
-rw-r--r--src/buildtool/build_engine/expression/linked_map.hpp414
-rw-r--r--src/buildtool/build_engine/expression/target_node.cpp20
-rw-r--r--src/buildtool/build_engine/expression/target_node.hpp83
-rw-r--r--src/buildtool/build_engine/expression/target_result.hpp33
-rw-r--r--src/buildtool/build_engine/target_map/TARGETS50
-rw-r--r--src/buildtool/build_engine/target_map/built_in_rules.cpp857
-rw-r--r--src/buildtool/build_engine/target_map/built_in_rules.hpp21
-rw-r--r--src/buildtool/build_engine/target_map/configured_target.hpp41
-rw-r--r--src/buildtool/build_engine/target_map/export.cpp126
-rw-r--r--src/buildtool/build_engine/target_map/export.hpp17
-rw-r--r--src/buildtool/build_engine/target_map/result_map.hpp291
-rw-r--r--src/buildtool/build_engine/target_map/target_map.cpp1338
-rw-r--r--src/buildtool/build_engine/target_map/target_map.hpp27
-rw-r--r--src/buildtool/build_engine/target_map/utils.cpp197
-rw-r--r--src/buildtool/build_engine/target_map/utils.hpp55
-rw-r--r--src/buildtool/common/TARGETS101
-rw-r--r--src/buildtool/common/action.hpp78
-rw-r--r--src/buildtool/common/action_description.hpp200
-rw-r--r--src/buildtool/common/artifact.hpp214
-rw-r--r--src/buildtool/common/artifact_description.hpp316
-rw-r--r--src/buildtool/common/artifact_digest.hpp74
-rw-r--r--src/buildtool/common/artifact_factory.hpp91
-rw-r--r--src/buildtool/common/bazel_types.hpp86
-rw-r--r--src/buildtool/common/cli.hpp365
-rw-r--r--src/buildtool/common/identifier.hpp25
-rw-r--r--src/buildtool/common/repository_config.hpp133
-rw-r--r--src/buildtool/common/statistics.hpp61
-rw-r--r--src/buildtool/common/tree.hpp72
-rw-r--r--src/buildtool/crypto/TARGETS31
-rw-r--r--src/buildtool/crypto/hash_generator.hpp130
-rw-r--r--src/buildtool/crypto/hash_impl.hpp40
-rw-r--r--src/buildtool/crypto/hash_impl_git.cpp42
-rw-r--r--src/buildtool/crypto/hash_impl_git.hpp10
-rw-r--r--src/buildtool/crypto/hash_impl_md5.cpp50
-rw-r--r--src/buildtool/crypto/hash_impl_md5.hpp10
-rw-r--r--src/buildtool/crypto/hash_impl_sha1.cpp50
-rw-r--r--src/buildtool/crypto/hash_impl_sha1.hpp10
-rw-r--r--src/buildtool/crypto/hash_impl_sha256.cpp50
-rw-r--r--src/buildtool/crypto/hash_impl_sha256.hpp10
-rw-r--r--src/buildtool/execution_api/TARGETS1
-rw-r--r--src/buildtool/execution_api/bazel_msg/TARGETS31
-rw-r--r--src/buildtool/execution_api/bazel_msg/bazel_blob.hpp31
-rw-r--r--src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp264
-rw-r--r--src/buildtool/execution_api/bazel_msg/bazel_common.hpp21
-rw-r--r--src/buildtool/execution_api/bazel_msg/bazel_msg_factory.cpp590
-rw-r--r--src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp128
-rw-r--r--src/buildtool/execution_api/common/TARGETS22
-rw-r--r--src/buildtool/execution_api/common/execution_action.hpp58
-rw-r--r--src/buildtool/execution_api/common/execution_api.hpp78
-rw-r--r--src/buildtool/execution_api/common/execution_common.hpp109
-rw-r--r--src/buildtool/execution_api/common/execution_response.hpp48
-rw-r--r--src/buildtool/execution_api/common/local_tree_map.hpp140
-rw-r--r--src/buildtool/execution_api/local/TARGETS36
-rw-r--r--src/buildtool/execution_api/local/config.hpp137
-rw-r--r--src/buildtool/execution_api/local/file_storage.hpp107
-rw-r--r--src/buildtool/execution_api/local/local_ac.hpp82
-rw-r--r--src/buildtool/execution_api/local/local_action.cpp295
-rw-r--r--src/buildtool/execution_api/local/local_action.hpp122
-rw-r--r--src/buildtool/execution_api/local/local_api.hpp157
-rw-r--r--src/buildtool/execution_api/local/local_cas.hpp103
-rw-r--r--src/buildtool/execution_api/local/local_response.hpp101
-rw-r--r--src/buildtool/execution_api/local/local_storage.cpp125
-rw-r--r--src/buildtool/execution_api/local/local_storage.hpp109
-rw-r--r--src/buildtool/execution_api/remote/TARGETS59
-rw-r--r--src/buildtool/execution_api/remote/bazel/bazel_ac_client.cpp75
-rw-r--r--src/buildtool/execution_api/remote/bazel/bazel_ac_client.hpp41
-rw-r--r--src/buildtool/execution_api/remote/bazel/bazel_action.cpp94
-rw-r--r--src/buildtool/execution_api/remote/bazel/bazel_action.hpp54
-rw-r--r--src/buildtool/execution_api/remote/bazel/bazel_api.cpp177
-rw-r--r--src/buildtool/execution_api/remote/bazel/bazel_api.hpp65
-rw-r--r--src/buildtool/execution_api/remote/bazel/bazel_cas_client.cpp354
-rw-r--r--src/buildtool/execution_api/remote/bazel/bazel_cas_client.hpp169
-rw-r--r--src/buildtool/execution_api/remote/bazel/bazel_client_common.hpp54
-rw-r--r--src/buildtool/execution_api/remote/bazel/bazel_execution_client.cpp129
-rw-r--r--src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp66
-rw-r--r--src/buildtool/execution_api/remote/bazel/bazel_network.cpp327
-rw-r--r--src/buildtool/execution_api/remote/bazel/bazel_network.hpp118
-rw-r--r--src/buildtool/execution_api/remote/bazel/bazel_response.cpp125
-rw-r--r--src/buildtool/execution_api/remote/bazel/bazel_response.hpp77
-rw-r--r--src/buildtool/execution_api/remote/bazel/bytestream_client.hpp185
-rw-r--r--src/buildtool/execution_api/remote/config.hpp72
-rw-r--r--src/buildtool/execution_engine/TARGETS1
-rw-r--r--src/buildtool/execution_engine/dag/TARGETS17
-rw-r--r--src/buildtool/execution_engine/dag/dag.cpp263
-rw-r--r--src/buildtool/execution_engine/dag/dag.hpp613
-rw-r--r--src/buildtool/execution_engine/executor/TARGETS16
-rw-r--r--src/buildtool/execution_engine/executor/executor.hpp532
-rw-r--r--src/buildtool/execution_engine/traverser/TARGETS14
-rw-r--r--src/buildtool/execution_engine/traverser/traverser.hpp187
-rw-r--r--src/buildtool/file_system/TARGETS79
-rw-r--r--src/buildtool/file_system/file_root.hpp239
-rw-r--r--src/buildtool/file_system/file_system_manager.hpp565
-rw-r--r--src/buildtool/file_system/git_cas.cpp180
-rw-r--r--src/buildtool/file_system/git_cas.hpp60
-rw-r--r--src/buildtool/file_system/git_tree.cpp178
-rw-r--r--src/buildtool/file_system/git_tree.hpp87
-rw-r--r--src/buildtool/file_system/jsonfs.hpp47
-rw-r--r--src/buildtool/file_system/object_type.hpp44
-rw-r--r--src/buildtool/file_system/system_command.hpp202
-rw-r--r--src/buildtool/graph_traverser/TARGETS22
-rw-r--r--src/buildtool/graph_traverser/graph_traverser.hpp569
-rw-r--r--src/buildtool/logging/TARGETS20
-rw-r--r--src/buildtool/logging/log_config.hpp69
-rw-r--r--src/buildtool/logging/log_level.hpp41
-rw-r--r--src/buildtool/logging/log_sink.hpp41
-rw-r--r--src/buildtool/logging/log_sink_cmdline.hpp93
-rw-r--r--src/buildtool/logging/log_sink_file.hpp129
-rw-r--r--src/buildtool/logging/logger.hpp123
-rw-r--r--src/buildtool/main/TARGETS21
-rw-r--r--src/buildtool/main/main.cpp1292
-rw-r--r--src/buildtool/main/main.hpp10
-rw-r--r--src/buildtool/multithreading/TARGETS54
-rw-r--r--src/buildtool/multithreading/async_map.hpp109
-rw-r--r--src/buildtool/multithreading/async_map_consumer.hpp331
-rw-r--r--src/buildtool/multithreading/async_map_node.hpp173
-rw-r--r--src/buildtool/multithreading/notification_queue.hpp188
-rw-r--r--src/buildtool/multithreading/task.hpp38
-rw-r--r--src/buildtool/multithreading/task_system.cpp56
-rw-r--r--src/buildtool/multithreading/task_system.hpp65
-rw-r--r--src/utils/TARGETS1
-rw-r--r--src/utils/cpp/TARGETS40
-rw-r--r--src/utils/cpp/atomic.hpp119
-rw-r--r--src/utils/cpp/concepts.hpp55
-rw-r--r--src/utils/cpp/hash_combine.hpp15
-rw-r--r--src/utils/cpp/hex_string.hpp19
-rw-r--r--src/utils/cpp/json.hpp83
-rw-r--r--src/utils/cpp/type_safe_arithmetic.hpp197
-rw-r--r--test/TARGETS24
-rw-r--r--test/buildtool/TARGETS15
-rw-r--r--test/buildtool/build_engine/base_maps/TARGETS127
-rw-r--r--test/buildtool/build_engine/base_maps/data/test_repo.bundlebin0 -> 2687 bytes
-rw-r--r--test/buildtool/build_engine/base_maps/data_expr/EXPRESSIONS66
-rw-r--r--test/buildtool/build_engine/base_maps/data_expr/readers/EXPRESSIONS23
-rw-r--r--test/buildtool/build_engine/base_maps/data_json/bad.json1
-rw-r--r--test/buildtool/build_engine/base_maps/data_json/foo.json3
-rw-r--r--test/buildtool/build_engine/base_maps/data_rule/RULES232
-rw-r--r--test/buildtool/build_engine/base_maps/data_rule/composers/EXPRESSIONS28
-rw-r--r--test/buildtool/build_engine/base_maps/data_src/file0
-rw-r--r--test/buildtool/build_engine/base_maps/data_src/foo/bar/file0
-rw-r--r--test/buildtool/build_engine/base_maps/directory_map.test.cpp87
-rw-r--r--test/buildtool/build_engine/base_maps/entity_name.test.cpp22
-rw-r--r--test/buildtool/build_engine/base_maps/expression_map.test.cpp208
-rw-r--r--test/buildtool/build_engine/base_maps/json_file_map.test.cpp135
-rw-r--r--test/buildtool/build_engine/base_maps/rule_map.test.cpp348
-rw-r--r--test/buildtool/build_engine/base_maps/source_map.test.cpp144
-rw-r--r--test/buildtool/build_engine/base_maps/test_repo.hpp41
-rw-r--r--test/buildtool/build_engine/expression/TARGETS42
-rw-r--r--test/buildtool/build_engine/expression/configuration.test.cpp107
-rw-r--r--test/buildtool/build_engine/expression/expression.test.cpp1401
-rw-r--r--test/buildtool/build_engine/expression/linked_map.test.cpp252
-rw-r--r--test/buildtool/build_engine/target_map/TARGETS72
-rw-r--r--test/buildtool/build_engine/target_map/data_rules/result/RULES153
-rw-r--r--test/buildtool/build_engine/target_map/data_rules/rule/RULES1
-rw-r--r--test/buildtool/build_engine/target_map/data_rules/simple_rules/RULES293
-rw-r--r--test/buildtool/build_engine/target_map/data_rules/tree/RULES87
-rw-r--r--test/buildtool/build_engine/target_map/data_src/a/b/targets_here/c/d/foo1
-rw-r--r--test/buildtool/build_engine/target_map/data_src/file_reference/hello.txt1
-rw-r--r--test/buildtool/build_engine/target_map/data_src/foo0
-rw-r--r--test/buildtool/build_engine/target_map/data_src/simple_rules/implicit_script.sh3
-rw-r--r--test/buildtool/build_engine/target_map/data_src/simple_targets/bar.txt1
-rw-r--r--test/buildtool/build_engine/target_map/data_src/simple_targets/baz.txt1
-rw-r--r--test/buildtool/build_engine/target_map/data_src/simple_targets/foo.txt1
-rw-r--r--test/buildtool/build_engine/target_map/data_src/tree/foo.txt0
-rw-r--r--test/buildtool/build_engine/target_map/data_src/tree/tree/foo.txt0
-rw-r--r--test/buildtool/build_engine/target_map/data_src/x/foo0
-rw-r--r--test/buildtool/build_engine/target_map/data_src/x/x/foo0
-rw-r--r--test/buildtool/build_engine/target_map/data_src/x/x/x/foo0
-rw-r--r--test/buildtool/build_engine/target_map/data_src/x/x/x/x/foo0
-rw-r--r--test/buildtool/build_engine/target_map/data_src/x/x/x/x/x/foo0
-rw-r--r--test/buildtool/build_engine/target_map/data_targets/TARGETS1
-rw-r--r--test/buildtool/build_engine/target_map/data_targets/a/b/targets_here/TARGETS1
-rw-r--r--test/buildtool/build_engine/target_map/data_targets/bad_targets/TARGETS16
-rw-r--r--test/buildtool/build_engine/target_map/data_targets/config_targets/TARGETS27
-rw-r--r--test/buildtool/build_engine/target_map/data_targets/file_reference/TARGETS9
-rw-r--r--test/buildtool/build_engine/target_map/data_targets/result/TARGETS46
-rw-r--r--test/buildtool/build_engine/target_map/data_targets/simple_rules/TARGETS1
-rw-r--r--test/buildtool/build_engine/target_map/data_targets/simple_targets/TARGETS156
-rw-r--r--test/buildtool/build_engine/target_map/data_targets/tree/TARGETS23
-rw-r--r--test/buildtool/build_engine/target_map/data_targets/x/TARGETS1
-rw-r--r--test/buildtool/build_engine/target_map/data_targets/x/x/TARGETS1
-rw-r--r--test/buildtool/build_engine/target_map/data_targets/x/x/x/TARGETS9
-rw-r--r--test/buildtool/build_engine/target_map/data_targets/x/x/x/x/TARGETS1
-rw-r--r--test/buildtool/build_engine/target_map/data_targets/x/x/x/x/x/TARGETS1
-rw-r--r--test/buildtool/build_engine/target_map/result_map.test.cpp139
-rw-r--r--test/buildtool/build_engine/target_map/target_map.test.cpp914
-rw-r--r--test/buildtool/common/TARGETS41
-rw-r--r--test/buildtool/common/action_description.test.cpp72
-rw-r--r--test/buildtool/common/artifact_description.test.cpp127
-rw-r--r--test/buildtool/common/artifact_factory.test.cpp54
-rw-r--r--test/buildtool/crypto/TARGETS13
-rw-r--r--test/buildtool/crypto/crypto.test.cpp57
-rw-r--r--test/buildtool/execution_api/TARGETS30
-rw-r--r--test/buildtool/execution_api/bazel/TARGETS87
-rw-r--r--test/buildtool/execution_api/bazel/bazel_ac_client.test.cpp50
-rw-r--r--test/buildtool/execution_api/bazel/bazel_cas_client.test.cpp73
-rwxr-xr-xtest/buildtool/execution_api/bazel/bazel_execution_client.test.cpp102
-rw-r--r--test/buildtool/execution_api/bazel/bazel_msg_factory.test.cpp53
-rw-r--r--test/buildtool/execution_api/bazel/bazel_network.test.cpp45
-rw-r--r--test/buildtool/execution_api/bazel/bytestream_client.test.cpp169
-rwxr-xr-xtest/buildtool/execution_api/data/executable_file1
-rwxr-xr-xtest/buildtool/execution_api/data/non_executable_file1
-rw-r--r--test/buildtool/execution_api/data/subdir1/file11
-rw-r--r--test/buildtool/execution_api/data/subdir1/subdir2/file21
-rw-r--r--test/buildtool/execution_api/local/TARGETS73
-rw-r--r--test/buildtool/execution_api/local/local_ac.test.cpp121
-rw-r--r--test/buildtool/execution_api/local/local_api.test.cpp299
-rw-r--r--test/buildtool/execution_api/local/local_cas.test.cpp88
-rwxr-xr-xtest/buildtool/execution_api/local/local_execution.test.cpp274
-rw-r--r--test/buildtool/execution_api/local/local_storage.test.cpp180
-rw-r--r--test/buildtool/execution_api/local_tree_map.test.cpp110
-rw-r--r--test/buildtool/execution_engine/TARGETS10
-rw-r--r--test/buildtool/execution_engine/dag/TARGETS15
-rw-r--r--test/buildtool/execution_engine/dag/dag.test.cpp293
-rw-r--r--test/buildtool/execution_engine/executor/TARGETS71
-rw-r--r--test/buildtool/execution_engine/executor/data/greeter/greet.cpp6
-rw-r--r--test/buildtool/execution_engine/executor/data/greeter/greet.hpp3
-rw-r--r--test/buildtool/execution_engine/executor/data/greeter/greet_mod.cpp8
-rw-r--r--test/buildtool/execution_engine/executor/data/greeter/main.cpp6
-rw-r--r--test/buildtool/execution_engine/executor/data/hello_world/main.cpp6
-rwxr-xr-xtest/buildtool/execution_engine/executor/executor.test.cpp358
-rwxr-xr-xtest/buildtool/execution_engine/executor/executor_api.test.hpp615
-rwxr-xr-xtest/buildtool/execution_engine/executor/executor_api_local.test.cpp36
-rwxr-xr-xtest/buildtool/execution_engine/executor/executor_api_remote_bazel.test.cpp71
-rw-r--r--test/buildtool/execution_engine/traverser/TARGETS16
-rw-r--r--test/buildtool/execution_engine/traverser/traverser.test.cpp836
-rw-r--r--test/buildtool/file_system/TARGETS62
-rwxr-xr-xtest/buildtool/file_system/data/empty_executable0
-rw-r--r--test/buildtool/file_system/data/example_file3
-rw-r--r--test/buildtool/file_system/data/test_repo.bundlebin0 -> 543 bytes
-rw-r--r--test/buildtool/file_system/file_root.test.cpp224
-rw-r--r--test/buildtool/file_system/file_system_manager.test.cpp346
-rw-r--r--test/buildtool/file_system/git_tree.test.cpp527
-rw-r--r--test/buildtool/file_system/system_command.test.cpp115
-rw-r--r--test/buildtool/graph_traverser/TARGETS84
-rw-r--r--test/buildtool/graph_traverser/data/copy_local_file/_entry_points9
-rw-r--r--test/buildtool/graph_traverser/data/copy_local_file/copy_me.hpp0
-rw-r--r--test/buildtool/graph_traverser/data/copy_local_file/graph_description5
-rw-r--r--test/buildtool/graph_traverser/data/flaky_hello_world/_entry_points9
-rw-r--r--test/buildtool/graph_traverser/data/flaky_hello_world/_entry_points_ctimes9
-rw-r--r--test/buildtool/graph_traverser/data/flaky_hello_world/_entry_points_stripped9
-rw-r--r--test/buildtool/graph_traverser/data/flaky_hello_world/graph_description83
-rw-r--r--test/buildtool/graph_traverser/data/flaky_hello_world/hello_world.cpp6
-rw-r--r--test/buildtool/graph_traverser/data/hello_world_copy_message/_entry_points9
-rwxr-xr-xtest/buildtool/graph_traverser/data/hello_world_copy_message/_entry_points_get_executable9
-rw-r--r--test/buildtool/graph_traverser/data/hello_world_copy_message/_entry_points_upload_source9
-rw-r--r--test/buildtool/graph_traverser/data/hello_world_copy_message/graph_description45
-rw-r--r--test/buildtool/graph_traverser/data/hello_world_copy_message/hello_world.cpp5
-rw-r--r--test/buildtool/graph_traverser/data/hello_world_known_source/_entry_points9
-rw-r--r--test/buildtool/graph_traverser/data/hello_world_known_source/graph_description46
-rw-r--r--test/buildtool/graph_traverser/data/sequence_printer_build_library_only/_entry_points9
-rw-r--r--test/buildtool/graph_traverser/data/sequence_printer_build_library_only/_entry_points_full_build9
-rw-r--r--test/buildtool/graph_traverser/data/sequence_printer_build_library_only/graph_description173
-rw-r--r--test/buildtool/graph_traverser/data/sequence_printer_build_library_only/main.cpp27
-rw-r--r--test/buildtool/graph_traverser/data/sequence_printer_build_library_only/printer/printer.hpp18
-rw-r--r--test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/fibonacci.cpp17
-rw-r--r--test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/fibonacci.hpp17
-rw-r--r--test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/random_dna_sequence.cpp25
-rw-r--r--test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/random_dna_sequence.hpp19
-rw-r--r--test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/sequence.hpp15
-rw-r--r--test/buildtool/graph_traverser/data/use_env_variables/_entry_points9
-rw-r--r--test/buildtool/graph_traverser/data/use_env_variables/graph_description18
-rw-r--r--test/buildtool/graph_traverser/data/use_nested_trees/_entry_points9
-rw-r--r--test/buildtool/graph_traverser/data/use_nested_trees/graph_description51
-rw-r--r--test/buildtool/graph_traverser/data/use_trees/_entry_points9
-rw-r--r--test/buildtool/graph_traverser/data/use_trees/graph_description70
-rw-r--r--test/buildtool/graph_traverser/data/use_uploaded_blobs/_entry_points9
-rw-r--r--test/buildtool/graph_traverser/data/use_uploaded_blobs/graph_description34
-rw-r--r--test/buildtool/graph_traverser/graph_traverser.test.hpp412
-rw-r--r--test/buildtool/graph_traverser/graph_traverser_local.test.cpp57
-rw-r--r--test/buildtool/graph_traverser/graph_traverser_remote.test.cpp41
-rw-r--r--test/buildtool/logging/TARGETS26
-rw-r--r--test/buildtool/logging/log_sink_file.test.cpp99
-rw-r--r--test/buildtool/logging/logger.test.cpp322
-rw-r--r--test/buildtool/multithreading/TARGETS76
-rw-r--r--test/buildtool/multithreading/async_map.test.cpp58
-rw-r--r--test/buildtool/multithreading/async_map_consumer.test.cpp309
-rw-r--r--test/buildtool/multithreading/async_map_node.test.cpp93
-rw-r--r--test/buildtool/multithreading/task.test.cpp328
-rw-r--r--test/buildtool/multithreading/task_system.test.cpp225
-rw-r--r--test/main.cpp8
-rw-r--r--test/utils/TARGETS57
-rw-r--r--test/utils/container_matchers.hpp174
-rw-r--r--test/utils/hermeticity/local.hpp39
-rw-r--r--test/utils/logging/log_config.hpp42
-rw-r--r--test/utils/remote_execution/bazel_action_creator.hpp75
-rwxr-xr-xtest/utils/remote_execution/main-remote-execution.cpp50
-rw-r--r--test/utils/test_env.hpp44
-rw-r--r--test/utils/typed_testfixtures.py12
380 files changed, 45974 insertions, 0 deletions
diff --git a/ROOT b/ROOT
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/ROOT
diff --git a/TARGETS b/TARGETS
new file mode 100644
index 00000000..4340bb91
--- /dev/null
+++ b/TARGETS
@@ -0,0 +1,26 @@
+{ "exported-just":
+ { "type": "export"
+ , "target": ["src/buildtool/main", "just"]
+ , "flexible_config":
+ ["OS", "ARCH", "HOST_ARCH", "TARGET_ARCH", "CXX", "AR", "ENV"]
+ }
+, "just":
+ { "type": ["@", "rules", "CC", "configure"]
+ , "arguments_config": ["OS", "ARCH"]
+ , "os": [{"type": "var", "name": "OS", "default": "linux"}]
+ , "arch": [{"type": "var", "name": "ARCH", "default": "x86_64"}]
+ , "target": ["exported-just"]
+ }
+, "libgit2":
+ { "type": "export"
+ , "target": ["@", "libgit2", "", "git2"]
+ , "fixed_config":
+ { "USE_SHA1": "OpenSSL"
+ , "USE_SSH": false
+ , "USE_HTTPS": false
+ , "USE_GSSAPI": false
+ }
+ , "flexible_config":
+ ["OS", "ARCH", "HOST_ARCH", "TARGET_ARCH", "CXX", "AR", "ENV"]
+ }
+}
diff --git a/bin/bootstrap-traverser.py b/bin/bootstrap-traverser.py
new file mode 100755
index 00000000..96b0a293
--- /dev/null
+++ b/bin/bootstrap-traverser.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python3
+
+import hashlib
+import json
+import os
+import shutil
+import subprocess
+import sys
+
+from optparse import OptionParser
+
+def log(*args, **kwargs):
+ print(*args, file=sys.stderr, **kwargs)
+
+def fail(s):
+ log(s)
+ sys.exit(1)
+
+def git_hash(content):
+ header = "blob {}\0".format(len(content)).encode('utf-8')
+ h = hashlib.sha1()
+ h.update(header)
+ h.update(content)
+ return h.hexdigest()
+
+def create_blobs(blobs, *, root):
+ os.makedirs(os.path.join(root, "KNOWN"))
+ for blob in blobs:
+ blob_bin = blob.encode('utf-8')
+ with open(os.path.join(root, "KNOWN", git_hash(blob_bin)), "wb") as f:
+ f.write(blob_bin)
+
+def build_known(desc, *, root):
+ return os.path.join(root, "KNOWN", desc["data"]["id"])
+
+def link(src, dest):
+ os.makedirs(os.path.dirname(dest), exist_ok=True)
+ os.symlink(src, dest)
+
+def build_local(desc, *, root, config):
+ repo_name = desc["data"]["repository"]
+ repo = config["repositories"][repo_name]["workspace_root"]
+ rel_path = desc["data"]["path"]
+ if repo[0] == "file":
+ return os.path.join(repo[1], rel_path)
+ fail("Unsupported repository root %r" % (repo,))
+
+def build_tree(desc, *, config, root, graph):
+ tree_id = desc["data"]["id"]
+ tree_dir = os.path.normpath(os.path.join(root, "TREE", tree_id))
+ if os.path.isdir(tree_dir):
+ return tree_dir
+ os.makedirs(tree_dir)
+ tree_desc = graph["trees"][tree_id]
+ for location, desc in tree_desc.items():
+ link(build(desc, config=config, root=root, graph=graph),
+ os.path.join(tree_dir, location))
+ return tree_dir
+
+def run_action(action_id, *, config, root, graph):
+ action_dir = os.path.normpath(os.path.join(root, "ACTION", action_id))
+ if os.path.isdir(action_dir):
+ return action_dir
+ os.makedirs(action_dir)
+ action_desc = graph["actions"][action_id]
+ for location, desc in action_desc["input"].items():
+ link(build(desc, config=config, root=root, graph=graph),
+ os.path.join(action_dir, location))
+ cmd = action_desc["command"]
+ env = action_desc.get("env")
+ log("Running %r with env %r for action %r"
+ % (cmd, env, action_id))
+ for out in action_desc["output"]:
+ os.makedirs(os.path.join(action_dir, os.path.dirname(out)),
+ exist_ok=True)
+ subprocess.run(cmd, env=env, cwd=action_dir, check=True)
+ return action_dir
+
+def build_action(desc, *, config, root, graph):
+ action_dir = run_action(desc["data"]["id"], config=config, root=root, graph=graph)
+ return os.path.join(action_dir, desc["data"]["path"])
+
+def build(desc, *, config, root, graph):
+ if desc["type"] == "TREE":
+ return build_tree(desc, config=config, root=root, graph=graph)
+ if desc["type"] == "ACTION":
+ return build_action(desc, config=config, root=root, graph=graph)
+ if desc["type"] == "KNOWN":
+ return build_known(desc, root=root)
+ if desc["type"] == "LOCAL":
+ return build_local(desc, root=root, config=config)
+ fail("Don't know how to build artifact %r" % (desc,))
+
+def traverse(*, graph, to_build, out, root, config):
+ os.makedirs(out, exist_ok=True)
+ os.makedirs(root, exist_ok=True)
+ create_blobs(graph["blobs"], root=root)
+ for location, artifact in to_build.items():
+ link(build(artifact, config=config, root=root, graph=graph),
+ os.path.join(out, location))
+
+def main():
+ parser = OptionParser()
+ parser.add_option("-C", dest="repository_config",
+ help="Repository-description file to use",
+ metavar="FILE")
+ parser.add_option("-o", dest="output_directory",
+ help="Directory to place output to")
+ parser.add_option("--local_build_root", dest="local_build_root",
+ help="Root for storing intermediate outputs",
+ metavar="PATH")
+ parser.add_option("--default_workspace", dest="default_workspace",
+ help="Workspace root to use if none is specified",
+ metavar="PATH")
+ (options, args) = parser.parse_args()
+ if len(args) != 2:
+ fail("usage: %r <graph> <targets_to_build>"
+ % (sys.argv[0],))
+ with open(args[0]) as f:
+ graph = json.load(f)
+ with open(args[1]) as f:
+ to_build = json.load(f)
+ out = os.path.abspath(options.output_directory or "out-boot")
+ root = os.path.abspath(options.local_build_root or ".just-boot")
+ with open(options.repository_config or "repo-conf.json") as f:
+ config = json.load(f)
+ if options.default_workspace:
+ ws_root = os.path.abspath(options.default_workspace)
+ repos = config.get("repositories", {}).keys()
+ for repo in repos:
+ if not "workspace_root" in config["repositories"][repo]:
+ config["repositories"][repo]["workspace_root"] = ["file", ws_root]
+ traverse(graph=graph, to_build=to_build, out=out, root=root, config=config)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/bin/bootstrap.py b/bin/bootstrap.py
new file mode 100755
index 00000000..9eb4a9cc
--- /dev/null
+++ b/bin/bootstrap.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python3
+
+import hashlib
+import json
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
+from pathlib import Path
+
+# path within the repository (constants)
+
+REPOS = "etc/repos.json"
+BOOTSTRAP_CC = ["clang++", "-std=c++20", "-DBOOTSTRAP_BUILD_TOOL"]
+MAIN_MODULE = ""
+MAIN_TARGET = "just"
+MAIN_STAGE = "src/buildtool/main/just"
+
+# relevant directories (global variables)
+
+SRCDIR = os.getcwd()
+WRKDIR = None
+DISTDIR = []
+
+def git_hash(content):
+ header = "blob {}\0".format(len(content)).encode('utf-8')
+ h = hashlib.sha1()
+ h.update(header)
+ h.update(content)
+ return h.hexdigest()
+
+def get_checksum(filename):
+ with open(filename, "rb") as f:
+ data = f.read()
+ return git_hash(data)
+
+def get_archive(*, distfile, fetch):
+ # Fetch the archive, if necessary. Return path to archive
+ for d in DISTDIR:
+ candidate_path = os.path.join(d, distfile)
+ if os.path.isfile(candidate_path):
+ return candidate_path
+ # Fetch to bootstrap working directory
+ fetch_dir = os.path.join(WRKDIR, "fetch")
+ os.makedirs(fetch_dir, exist_ok=True)
+ target = os.path.join(fetch_dir, distfile)
+ subprocess.run(["wget", "-O", target, fetch])
+ return target
+
+def run(cmd, *, cwd, **kwargs):
+ print("Running %r in %r" % (cmd, cwd), flush=True)
+ subprocess.run(cmd, cwd=cwd, check=True, **kwargs)
+
+def setup_deps():
+ # unpack all dependencies and return a list of
+ # additional C++ flags required
+ with open(os.path.join(SRCDIR, REPOS)) as f:
+ config = json.load(f)["repositories"]
+ include_location = os.path.join(WRKDIR, "dep_includes")
+ link_flags = []
+ os.makedirs(include_location)
+ for repo, total_desc in config.items():
+ desc = total_desc.get("repository", {})
+ if not isinstance(desc, dict):
+ # Indirect definition; we will set up the repository at the
+ # resolved place, which also has to be part of the global
+ # repository description.
+ continue
+ hints = total_desc.get("bootstrap", {})
+ if desc.get("type") in ["archive", "zip"]:
+ fetch = desc["fetch"]
+ distfile = desc.get("distfile") or os.path.basename(fetch)
+ archive = get_archive(distfile=distfile, fetch=fetch)
+ actual_checksum = get_checksum(archive)
+ expected_checksum = desc.get("content")
+ if actual_checksum != expected_checksum:
+ print("Checksum mismatch for %r. Expected %r, found %r"
+ % (archive, expected_checksum, actual_checksum))
+ print("Unpacking %r from %r" % (repo, archive))
+ unpack_location = os.path.join(WRKDIR, "deps", repo)
+ os.makedirs(unpack_location)
+ if desc["type"] == "zip":
+ subprocess.run(["unzip", "-d", ".", archive],
+ cwd=unpack_location, stdout=subprocess.DEVNULL)
+ else:
+ subprocess.run(["tar", "xf", archive],
+ cwd=unpack_location)
+ subdir = os.path.join(unpack_location,
+ desc.get("subdir", "."))
+ include_dir = os.path.join(subdir,
+ hints.get("include_dir", "."))
+ include_name = hints.get("include_name", repo)
+ os.symlink(os.path.normpath(include_dir),
+ os.path.join(include_location, include_name))
+ if "build" in hints:
+ run(["sh", "-c", hints["build"]], cwd=subdir)
+ if "link" in hints:
+ link_flags.extend(["-L", subdir])
+ if "link" in hints:
+ link_flags.extend(hints["link"])
+
+ return {
+ "include": ["-I", include_location],
+ "link": link_flags
+ }
+
+def bootstrap():
+ # TODO: add package build mode, building against preinstalled dependencies
+ # rather than building dependencies ourselves.
+ print("Bootstrapping in %r from sources %r, taking files from %r"
+ % (WRKDIR, SRCDIR, DISTDIR))
+ os.makedirs(WRKDIR, exist_ok=True)
+ dep_flags = setup_deps();
+ # handle proto
+ src_wrkdir = os.path.join(WRKDIR, "src")
+ shutil.copytree(SRCDIR, src_wrkdir)
+ flags = ["-I", src_wrkdir] + dep_flags["include"]
+ cpp_files = []
+ for root, dirs, files in os.walk(src_wrkdir):
+ if 'test' in dirs:
+ dirs.remove('test')
+ if 'execution_api' in dirs:
+ dirs.remove('execution_api')
+ for f in files:
+ if f.endswith(".cpp"):
+ cpp_files.append(os.path.join(root, f))
+ object_files = []
+ for f in cpp_files:
+ obj_file_name =f[:-len(".cpp")] + ".o"
+ object_files.append(obj_file_name)
+ cmd = BOOTSTRAP_CC + flags + ["-c", f, "-o", obj_file_name]
+ run(cmd, cwd=src_wrkdir)
+ bootstrap_just = os.path.join(WRKDIR, "bootstrap-just")
+ cmd = BOOTSTRAP_CC + ["-o", bootstrap_just] + object_files + dep_flags["link"]
+ run(cmd, cwd=src_wrkdir)
+ CONF_FILE = os.path.join(WRKDIR, "repo-conf.json")
+ LOCAL_ROOT = os.path.join(WRKDIR, ".just")
+ os.makedirs(LOCAL_ROOT, exist_ok=True)
+ run(["sh", "-c",
+ "cp `./bin/just-mr.py --always_file -C %s --local_build_root=%s setup just` %s"
+ % (REPOS, LOCAL_ROOT, CONF_FILE)],
+ cwd=src_wrkdir)
+ GRAPH = os.path.join(WRKDIR, "graph.json")
+ TO_BUILD = os.path.join(WRKDIR, "to_build.json")
+ run([bootstrap_just, "analyse",
+ "-C", CONF_FILE,
+ "--dump_graph", GRAPH,
+ "--dump_artifacts_to_build", TO_BUILD,
+ MAIN_MODULE, MAIN_TARGET],
+ cwd=src_wrkdir)
+ run(["./bin/bootstrap-traverser.py",
+ "-C", CONF_FILE,
+ "--default_workspace", src_wrkdir,
+ GRAPH, TO_BUILD],
+ cwd=src_wrkdir)
+ OUT = os.path.join(WRKDIR, "out")
+ run(["./out-boot/%s" % (MAIN_STAGE,),
+ "install", "-C", CONF_FILE,
+ "-o", OUT, "--local_build_root", LOCAL_ROOT,
+ MAIN_MODULE, MAIN_TARGET],
+ cwd=src_wrkdir)
+
+
+
+def main(args):
+ global SRCDIR
+ global WRKDIR
+ global DISTDIR
+ if len(args) > 1:
+ SRCDIR = os.path.abspath(args[1])
+ if len(args) > 2:
+ WRKDIR = os.path.abspath(args[2])
+
+ if not WRKDIR:
+ WRKDIR = tempfile.mkdtemp()
+ if not DISTDIR:
+ DISTDIR = [os.path.join(Path.home(), ".distfiles")]
+ bootstrap()
+
+if __name__ == "__main__":
+ # Parse options, set DISTDIR
+ main(sys.argv)
diff --git a/bin/just-mr.py b/bin/just-mr.py
new file mode 100755
index 00000000..f71bbefe
--- /dev/null
+++ b/bin/just-mr.py
@@ -0,0 +1,523 @@
+#!/usr/bin/env python3
+
+import hashlib
+import json
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+from optparse import OptionParser
+from pathlib import Path
+
+JUST="just"
+ROOT="/justroot"
+DISTDIR=[]
+
+ALWAYS_FILE=False
+
+GIT_CHECKOUT_LOCATIONS={}
+
+TAKE_OVER= [
+ "bindings",
+ "target_file_name",
+ "index_file_name",
+ "rule_file_name",
+ "expression_file_name",
+]
+ALT_DIRS=[
+ "target_root",
+ "rule_root",
+ "expression_root",
+ "index_root",
+]
+
+GIT_NOBODY_ENV ={
+ "GIT_AUTHOR_DATE": "1970-01-01T00:00Z",
+ "GIT_AUTHOR_NAME": "Nobody",
+ "GIT_AUTHOR_EMAIL": "nobody@example.org",
+ "GIT_COMMITTER_DATE": "1970-01-01T00:00Z",
+ "GIT_COMMITTER_NAME": "Nobody",
+ "GIT_COMMITTER_EMAIL": "nobody@example.org",
+}
+
+
+def log(*args, **kwargs):
+ print(*args, file=sys.stderr, **kwargs)
+
+def fail(s):
+ log(s)
+ sys.exit(1)
+
+def run_cmd(cmd, *, env=None, cwd):
+ result = subprocess.run(
+ cmd, cwd=cwd, env=env,
+ stdout=subprocess.DEVNULL)
+ if result.returncode != 0:
+ fail("Command %s in %s failed"
+ % (cmd, cwd))
+
+def read_config(configfile):
+ if configfile:
+ with open(configfile) as f:
+ return json.load(f)
+ default_config = os.path.join(Path.home(), ".just-repos.json")
+
+ if os.path.isfile(default_config):
+ with open(default_config) as f:
+ return json.load(f)
+
+ return {}
+
+def git_root(*, upstream):
+ if upstream in GIT_CHECKOUT_LOCATIONS:
+ return GIT_CHECKOUT_LOCATIONS[upstream]
+ else:
+ return os.path.join(ROOT, "git")
+
+def git_keep(commit, *, upstream):
+ if upstream in GIT_CHECKOUT_LOCATIONS:
+ # for those, we assume the referenced commit is kept by
+ # some branch anyway
+ return
+ run_cmd(
+ ["git", "tag", "-f", "-m", "Keep referenced tree alive",
+ "keep-%s" % (commit,), commit],
+ cwd=git_root(upstream=upstream),
+ env = dict(os.environ, **GIT_NOBODY_ENV),
+ )
+
+def git_init_options(*, upstream):
+ if upstream in GIT_CHECKOUT_LOCATIONS:
+ return []
+ else:
+ return ["--bare"]
+
+def ensure_git(*, upstream):
+ root = git_root(upstream=upstream)
+ if os.path.exists(root):
+ return
+ os.makedirs(root)
+ run_cmd(["git", "init"] + git_init_options(upstream=upstream),
+ cwd=root)
+
+def git_commit_present(commit, *, upstream):
+ result = subprocess.run(["git", "show", "--oneline", commit],
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ cwd=git_root(upstream=upstream))
+ return result.returncode == 0
+
+def git_url_is_path(url):
+ for prefix in ["ssh://", "http://", "https://"]:
+ if url.startswith(prefix):
+ return False
+ return True
+
+def git_fetch(*, repo, branch):
+ if git_url_is_path(repo):
+ repo = os.path.abspath(repo)
+ run_cmd(["git", "fetch", repo, branch], cwd=git_root(upstream=repo))
+
+def subdir_path(checkout, desc):
+ return os.path.normpath(os.path.join(checkout, desc.get("subdir", ".")))
+
+def git_tree(*, commit, subdir, upstream):
+ tree = subprocess.run(["git", "log", "-n", "1", "--format=%T", commit],
+ stdout=subprocess.PIPE,
+ cwd=git_root(upstream=upstream)).stdout.decode('utf-8').strip()
+ return git_subtree(tree=tree, subdir=subdir, upstream=upstream)
+
+def git_subtree(*, tree, subdir, upstream):
+ if subdir == ".":
+ return tree
+ return subprocess.Popen(
+ ["git", "cat-file", "--batch-check=%(objectname)"],
+ stdout=subprocess.PIPE,
+ stdin=subprocess.PIPE,
+ cwd=git_root(upstream=upstream)
+ ).communicate(input=("%s:%s" % (tree, subdir)).encode())[0].decode('utf-8').strip()
+
+def git_checkout_dir(commit):
+ return os.path.join(ROOT, "workspaces", "git", commit)
+
+def git_checkout(desc):
+ commit = desc["commit"]
+ target = git_checkout_dir(commit)
+ if ALWAYS_FILE and os.path.exists(target):
+ return ["file", subdir_path(target, desc)]
+ repo=desc["repository"]
+ root = git_root(upstream=repo)
+ ensure_git(upstream=repo)
+ if not git_commit_present(commit, upstream=repo):
+ branch=desc["branch"]
+ log("Fetching %s from %s (in %s)" % (branch, repo, root))
+ git_fetch(repo=repo, branch=branch)
+ if not git_commit_present(commit, upstream=repo):
+ fail("Fetching %s from %s failed to fetch %s"
+ % (branch, repo, commit))
+ git_keep(commit, upstream=repo)
+ if ALWAYS_FILE:
+ os.makedirs(target)
+ with tempfile.TemporaryFile() as f:
+ run_cmd(["git", "archive", commit],
+ cwd=root, stdout=f)
+ f.seek(0)
+ run_cmd(["tar", "x"], cwd=target, stdin=f)
+ return ["file", subdir_path(target, desc)]
+ tree = git_tree(commit=commit, subdir=desc.get("subdir", "."), upstream=repo)
+ return ["git tree", tree, root]
+
+def update_git(desc):
+ repo=desc["repository"]
+ branch=desc["branch"]
+ lsremote = subprocess.run(["git", "ls-remote", repo, branch],
+ stdout=subprocess.PIPE).stdout
+ desc["commit"] = lsremote.decode('utf-8').split('\t')[0]
+
+def git_hash(content):
+ header = "blob {}\0".format(len(content)).encode('utf-8')
+ h = hashlib.sha1()
+ h.update(header)
+ h.update(content)
+ return h.hexdigest()
+
+def add_to_cas(data):
+ if isinstance(data, str):
+ data = data.encode('utf-8')
+ cas_root = os.path.join(ROOT,"casf")
+ basename = git_hash(data)
+ target = os.path.join(cas_root, basename)
+ tempname = os.path.join(cas_root, "%s.%d" % (basename, os.getpid()))
+
+ if os.path.exists(target):
+ return target
+
+ os.makedirs(cas_root, exist_ok=True)
+ with open(tempname, "wb") as f:
+ f.write(data)
+ f.flush()
+ os.fsync(f.fileno())
+ os.rename(tempname, target)
+ return target
+
+def cas_path(h):
+ return os.path.join(ROOT, "casf", h)
+
+def is_in_cas(h):
+ return os.path.exists(cas_path(h))
+
+def add_file_to_cas(filename):
+ # TODO: avoid going through memory
+ with open(filename, "rb") as f:
+ data = f.read()
+ add_to_cas(data)
+
+def add_distfile_to_cas(distfile):
+ for d in DISTDIR:
+ candidate = os.path.join(d, distfile)
+ if os.path.exists(candidate):
+ add_file_to_cas(candidate)
+
+def archive_checkout_dir(content, repo_type):
+ return os.path.join(ROOT, "workspaces", repo_type, content)
+
+def archive_tmp_checkout_dir(content, repo_type):
+ return os.path.join(ROOT, "tmp-workspaces", repo_type, content)
+
+def archive_tree_id_file(content, repo_type):
+ return os.path.join(ROOT, "tree-map", repo_type, content)
+
+def archive_checkout(desc, repo_type="archive", *, fetch_only=False):
+ content_id = desc["content"]
+ target = archive_checkout_dir(content_id, repo_type=repo_type)
+ if ALWAYS_FILE and os.path.exists(target):
+ return ["file", subdir_path(target, desc)]
+ tree_id_file = archive_tree_id_file(content_id, repo_type=repo_type)
+ if (not ALWAYS_FILE) and os.path.exists(tree_id_file):
+ with open(tree_id_file) as f:
+ archive_tree_id = f.read()
+ return [
+ "git tree",
+ git_subtree(tree=archive_tree_id, subdir=desc.get("subdir", "."),
+ upstream=None),
+ git_root(upstream=None),
+ ]
+ if not is_in_cas(content_id):
+ distfile = desc.get("distfile")
+ if not distfile:
+ distfile = os.path.basename(desc.get("fetch"))
+ if distfile:
+ add_distfile_to_cas(distfile)
+ if not is_in_cas(content_id):
+ url = desc["fetch"]
+ data = subprocess.run(["wget", "-O", "-", url], stdout=subprocess.PIPE).stdout
+ add_to_cas(data)
+ if not is_in_cas(content_id):
+ fail("Failed to fetch a file with id %s from %s" % (content_id, url))
+ if fetch_only:
+ return
+ if not ALWAYS_FILE:
+ target = archive_tmp_checkout_dir(content_id, repo_type=repo_type)
+ os.makedirs(target)
+ if repo_type == "zip":
+ run_cmd(["unzip", "-d", ".", cas_path(content_id)], cwd=target)
+ else:
+ run_cmd(["tar", "xf", cas_path(content_id)], cwd=target)
+ if ALWAYS_FILE:
+ return ["file", subdir_path(target, desc)]
+ run_cmd(["git", "init"], cwd=target)
+ run_cmd(["git", "add", "."], cwd=target)
+ run_cmd(
+ ["git", "commit", "-m", "Content of %s %r" % (repo_type, content_id)],
+ cwd=target,
+ env=dict(os.environ, **GIT_NOBODY_ENV),
+ )
+
+ ensure_git(upstream=None)
+ run_cmd(["git", "fetch", target],
+ cwd=git_root(upstream=None))
+ commit = subprocess.run(["git", "log", "-n", "1", "--format=%H"],
+ stdout=subprocess.PIPE,
+ cwd=target).stdout.decode('utf-8').strip()
+ git_keep(commit, upstream=None)
+ tree = subprocess.run(["git", "log", "-n", "1", "--format=%T"],
+ stdout=subprocess.PIPE,
+ cwd=target).stdout.decode('utf-8').strip()
+ shutil.rmtree(target)
+ os.makedirs(os.path.dirname(tree_id_file), exist_ok=True)
+ with open(tree_id_file, "w") as f:
+ f.write(tree)
+ return ["git tree",
+ git_subtree(tree=tree, subdir=desc.get("subdir", "."), upstream=None),
+ git_root(upstream=None)]
+
+def describe_file(desc):
+ fpath = desc["path"]
+ return ["file", os.path.abspath(fpath)]
+
+def resolve_repo(desc, *, seen=None, repos):
+ seen = seen or []
+ if not isinstance(desc, str):
+ return desc
+ if desc in seen:
+ fail("Cyclic reference in repository source definition: %r" % (seen,))
+ return resolve_repo(repos[desc]["repository"],
+ seen = seen + [desc], repos=repos)
+
+def checkout(desc, *, name, repos):
+ repo_desc = resolve_repo(desc, repos=repos)
+ repo_type = repo_desc.get("type")
+ if repo_type == "git":
+ return git_checkout(repo_desc)
+ if repo_type in ["archive", "zip"]:
+ return archive_checkout(repo_desc, repo_type=repo_type)
+ if repo_type == "file":
+ return describe_file(repo_desc)
+ fail("Unknown repository type %s for %s"
+ % (repo_type, name))
+
+def reachable_repositories(repo, *, repos):
+ # First compute the set of repositories transitively reachable via bindings
+ reachable = set()
+
+ def traverse(x):
+ nonlocal reachable
+ if x in reachable:
+ return
+ reachable.add(x)
+ bindings = repos[x].get("bindings", {})
+ for bound in bindings.values():
+ traverse(bound)
+
+ traverse(repo)
+
+ # Now add the repositories that serve as overlay directories for
+ # targets, rules, etc. Those repositories have to be fetched as well, but
+ # we do not have to consider their bindings.
+ to_fetch = reachable.copy()
+ for x in reachable:
+ for layer in ALT_DIRS:
+ if layer in repos[x]:
+ to_fetch.add(repos[x][layer])
+
+ return reachable, to_fetch
+
+def setup(*, config, args, interactive=False):
+ repos = config.get("repositories", {})
+ repos_to_setup = repos.keys()
+ repos_to_include = repos.keys()
+ mr_config = {}
+ main = None
+
+ if args:
+ if len(args) > 1:
+ fail("Usage: %s setup [<main repo>]"
+ % (sys.argv[0], ))
+ main = args[0]
+ repos_to_include, repos_to_setup = reachable_repositories(main,
+ repos=repos)
+ mr_config["main"] = main
+
+ mr_repos = {}
+ for repo in repos_to_setup:
+ desc = repos[repo]
+ if repo == main and interactive:
+ config = {}
+ else:
+ workspace = checkout(desc.get("repository", {}), name=repo, repos=repos)
+ config = { "workspace_root": workspace }
+ for key in TAKE_OVER:
+ val = desc.get(key, {})
+ if val:
+ config[key] = val
+ mr_repos[repo] = config
+ # Alternate directories are specifies as the workspace of
+ # some other repository. So we have to iterate over all repositories again
+ # to add those directories. We do this only for the repositories we include
+ # in the final configuration.
+ for repo in repos_to_include:
+ desc = repos[repo]
+ if repo == main and interactive:
+ continue
+ for key in ALT_DIRS:
+ val = desc.get(key, {})
+ if val:
+ if val == main and interactive:
+ continue
+ mr_repos[repo][key] = mr_repos[val]["workspace_root"]
+ mr_repos_actual = {}
+ for repo in repos_to_include:
+ mr_repos_actual[repo] = mr_repos[repo]
+ mr_config["repositories"] = mr_repos_actual
+
+ return add_to_cas(json.dumps(mr_config, indent=2, sort_keys=True))
+
+def build(*, config, args):
+ if len(args) != 3:
+ fail("Usage: %s build <repo> <moudle> <target>" % (sys.argv[0],))
+ config = setup(config=config, args=[args[0]])
+ cmd = [JUST, "build", "-C", config, "--local_build_root", ROOT,
+ args[1], args[2]]
+ log("Setup finished, exec %s" % (cmd,))
+ os.execvp(JUST, cmd)
+
+def install(*, config, args):
+ if len(args) != 4:
+ fail("Usage: %s install <repo> <moudle> <target> <install-path>" % (sys.argv[0],))
+ config = setup(config=config, args=[args[0]])
+ cmd = [JUST, "install", "-C", config, "--local_build_root", ROOT,
+ "-o", args[3], args[1], args[2]]
+ log("Setup finished, exec %s" % (cmd,))
+ os.execvp(JUST, cmd)
+
+def update(*, config, args):
+ for repo in args:
+ desc = config["repositories"][repo]["repository"]
+ desc = resolve_repo(desc, repos=config["repositories"])
+ repo_type = desc.get("type")
+ if repo_type == "git":
+ update_git(desc)
+ else:
+ fail("Don't know how to update %s repositories" % (repo_type,))
+ print(json.dumps(config, indent=2))
+ sys.exit(0)
+
+def fetch(*, config, args):
+ if args:
+ print("Warning: ignoring arguments %r" % (args,))
+ fetch_dir = None
+ for d in DISTDIR:
+ if os.path.isdir(d):
+ fetch_dir = os.path.abspath(d)
+ break
+ if not fetch_dir:
+ print("No directory found to fetch to, considered %r" % (DISTDIR,))
+ sys.exit(1)
+ print("Fetching to %r" % (fetch_dir,))
+
+ repos = config["repositories"]
+ for repo, desc in repos.items():
+ if ("repository" in desc and isinstance(desc["repository"], dict)
+ and desc["repository"]["type"] in ["zip", "archive"]):
+ repo_desc = desc["repository"]
+ distfile = repo_desc.get("distfile") or os.path.basename(repo_desc["fetch"])
+ content = repo_desc["content"]
+ print("%r --> %r (content: %s)" % (repo, distfile, content))
+ archive_checkout(repo_desc, repo_desc["type"], fetch_only=True)
+ shutil.copyfile(cas_path(content), os.path.join(fetch_dir, distfile))
+
+ sys.exit(0)
+
+
+def main():
+ parser = OptionParser()
+ parser.add_option("-C", dest="repository_config",
+ help="Repository-description file to use",
+ metavar="FILE")
+ parser.add_option("-L", dest="checkout_location",
+ help="Specification file for checkout locations")
+ parser.add_option("--local_build_root", dest="local_build_root",
+ help="Root for CAS, repository space, etc",
+ metavar="PATH")
+ parser.add_option("--distdir", dest="distdir", action="append",
+ help="Directory to look for distfiles before fetching",
+ metavar="PATH")
+ parser.add_option("--just", dest="just",
+ help="Path to the just binary",
+ metavar="PATH")
+ parser.add_option("--always_file", dest="always_file", action="store_true",
+ default=False, help="Always create file roots")
+
+ (options, args) = parser.parse_args()
+ config = read_config(options.repository_config)
+ global ROOT
+ ROOT = options.local_build_root or os.path.join(Path.home(), ".cache/just")
+ global GIT_CHECKOUT_LOCATIONS
+ if options.checkout_location:
+ with open(options.checkout_location) as f:
+ GIT_CHECKOUT_LOCATIONS = json.load(f).get("checkouts",{}).get("git", {})
+ elif os.path.isfile(os.path.join(Path().home(), ".just-local.json")):
+ with open(os.path.join(Path().home(), ".just-local.json")) as f:
+ GIT_CHECKOUT_LOCATIONS = json.load(f).get("checkouts",{}).get("git", {})
+ global DISTDIR
+ if options.distdir:
+ DISTDIR = options.distdir
+
+ DISTDIR.append(os.path.join(Path.home(), ".distfiles"))
+
+ global JUST
+ if options.just:
+ JUST=os.path.abspath(options.just)
+
+ global ALWAYS_FILE
+ ALWAYS_FILE=options.always_file
+
+ if not args:
+ fail("Usage: %s <cmd> [<args>]" % (sys.argv[0],))
+ if args[0] == "setup":
+ # Setup for interactive use, i.e., fetch the required repositories
+ # and generate an appropriate multi-repository configuration file.
+ # Store it in the CAS and print its path on stdout.
+ #
+ # For the main repository (if specified), leave out the workspace
+ # so that in the usage of just the workspace is determined from
+ # the working directory; in this way, working on a checkout of that
+ # repository is possible, while having all dependencies set up
+ # correctly.
+ print(setup(config=config, args=args[1:], interactive=True))
+ return
+ if args[0] == "build":
+ build(config=config, args=args[1:])
+ if args[0] == "install":
+ install(config=config, args=args[1:])
+ if args[0] == "update":
+ update(config=config, args=args[1:])
+ if args[0] == "fetch":
+ fetch(config=config, args=args[1:])
+ fail("Unknown subcommand %s" % (args[0],))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/etc/defaults/CC/TARGETS b/etc/defaults/CC/TARGETS
new file mode 100644
index 00000000..d9f206c4
--- /dev/null
+++ b/etc/defaults/CC/TARGETS
@@ -0,0 +1,16 @@
+{ "defaults":
+ { "type": ["CC", "defaults"]
+ , "CC": ["clang"]
+ , "CXX": ["clang++"]
+ , "CFLAGS": []
+ , "CXXFLAGS":
+ [ "-std=c++20"
+ , "-Wall"
+ , "-Wextra"
+ , "-Wpedantic"
+ , "-Wsign-conversion"
+ ]
+ , "AR": ["ar"]
+ , "PATH": ["/bin", "/sbin", "/usr/bin", "/usr/sbin"]
+ }
+}
diff --git a/etc/defaults/CC/TARGETS.boringssl b/etc/defaults/CC/TARGETS.boringssl
new file mode 100644
index 00000000..c570b5d2
--- /dev/null
+++ b/etc/defaults/CC/TARGETS.boringssl
@@ -0,0 +1,70 @@
+{ "defaults":
+ { "type": ["CC", "defaults"]
+ , "arguments_config": ["OS", "ARCH", "TARGET_ARCH"]
+ , "CC": ["clang"]
+ , "CXX": ["clang++"]
+ , "CFLAGS":
+ { "type": "let*"
+ , "bindings":
+ [ [ "PLATFORM"
+ , { "type": "join"
+ , "separator": "_"
+ , "$1":
+ [ {"type": "var", "name": "OS"}
+ , { "type": "var"
+ , "name": "TARGET_ARCH"
+ , "default": {"type": "var", "name": "ARCH"}
+ }
+ ]
+ }
+ ]
+ , [ "posix_copts"
+ , [ "-Wa,--noexecstack"
+ , "-D_XOPEN_SOURCE=700"
+ , "-Wall"
+ , "-Werror"
+ , "-Wformat=2"
+ , "-Wsign-compare"
+ , "-Wmissing-field-initializers"
+ , "-Wwrite-strings"
+ , "-Wshadow"
+ , "-fno-common"
+ ]
+ ]
+ ]
+ , "body":
+ { "type": "cond"
+ , "cond":
+ [ [ { "type": "or"
+ , "$1":
+ [ { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "linux_ppc64le"
+ }
+ , { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "linux_x86_64"
+ }
+ , { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "mac_x86_64"
+ }
+ ]
+ }
+ , {"type": "var", "name": "posix_copts"}
+ ]
+ , [ { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "windows_x86_64"
+ }
+ , ["-DWIN32_LEAN_AND_MEAN", "-DOPENSSL_NO_ASM"]
+ ]
+ ]
+ , "default": ["-DOPENSSL_NO_ASM"]
+ }
+ }
+ , "CXXFLAGS": []
+ , "AR": ["ar"]
+ , "PATH": ["/bin", "/sbin", "/usr/bin", "/usr/sbin"]
+ }
+}
diff --git a/etc/defaults/CC/TARGETS.nowerror b/etc/defaults/CC/TARGETS.nowerror
new file mode 100644
index 00000000..ecc58daf
--- /dev/null
+++ b/etc/defaults/CC/TARGETS.nowerror
@@ -0,0 +1,12 @@
+{ "defaults":
+ { "type": ["CC", "defaults"]
+ , "CC": ["clang"]
+ , "CXX": ["clang++"]
+ , "CFLAGS": []
+ , "CXXFLAGS":
+ [ "-std=c++20"
+ ]
+ , "AR": ["ar"]
+ , "PATH": ["/bin", "/sbin", "/usr/bin", "/usr/sbin"]
+ }
+}
diff --git a/etc/defaults/CC/TARGETS.protobuf b/etc/defaults/CC/TARGETS.protobuf
new file mode 100644
index 00000000..b808ea51
--- /dev/null
+++ b/etc/defaults/CC/TARGETS.protobuf
@@ -0,0 +1,19 @@
+{ "defaults":
+ { "type": ["CC", "defaults"]
+ , "CC": ["clang"]
+ , "CXX": ["clang++"]
+ , "CFLAGS": []
+ , "CXXFLAGS":
+ [ "-std=c++20"
+ , "-DHAVE_PTHREAD"
+ , "-DHAVE_ZLIB"
+ , "-Woverloaded-virtual"
+ , "-Wno-sign-compare"
+ , "-Wno-unused-function"
+ , "-Wno-write-strings"
+ , "-Wno-deprecated-declarations"
+ ]
+ , "AR": ["ar"]
+ , "PATH": ["/bin", "/sbin", "/usr/bin", "/usr/sbin"]
+ }
+}
diff --git a/etc/defaults/CC/test/TARGETS b/etc/defaults/CC/test/TARGETS
new file mode 100644
index 00000000..0967ef42
--- /dev/null
+++ b/etc/defaults/CC/test/TARGETS
@@ -0,0 +1 @@
+{}
diff --git a/etc/import/TARGETS.bazel_remote_apis b/etc/import/TARGETS.bazel_remote_apis
new file mode 100644
index 00000000..3a51bd53
--- /dev/null
+++ b/etc/import/TARGETS.bazel_remote_apis
@@ -0,0 +1,19 @@
+{ "semver_proto":
+ { "type": ["@", "rules", "proto", "library"]
+ , "name": ["semver_proto"]
+ , "srcs": ["build/bazel/semver/semver.proto"]
+ }
+, "remote_execution_proto":
+ { "type": ["@", "rules", "proto", "library"]
+ , "name": ["remote_execution_proto"]
+ , "service": ["yes"]
+ , "srcs": ["build/bazel/remote/execution/v2/remote_execution.proto"]
+ , "deps":
+ [ "semver_proto"
+ , ["@", "google_apis", "", "google_api_annotations_proto"]
+ , ["@", "google_apis", "", "google_api_http_proto"]
+ , ["@", "google_apis", "", "google_longrunning_operations_proto"]
+ , ["@", "google_apis", "", "google_rpc_status_proto"]
+ ]
+ }
+}
diff --git a/etc/import/TARGETS.boringssl b/etc/import/TARGETS.boringssl
new file mode 100644
index 00000000..5011581c
--- /dev/null
+++ b/etc/import/TARGETS.boringssl
@@ -0,0 +1,682 @@
+{ "crypto":
+ { "type": "export"
+ , "target": "crypto-lib"
+ , "flexible_config":
+ ["OS", "ARCH", "TARGET_ARCH", "CC", "CFLAGS", "ENV", "AR"]
+ }
+, "ssl":
+ { "type": "export"
+ , "target": "ssl-lib"
+ , "flexible_config":
+ ["OS", "ARCH", "TARGET_ARCH", "CC", "CFLAGS", "ENV", "AR"]
+ }
+, "crypto-lib":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["crypto"]
+ , "hdrs": [["./", "src/include/openssl", "crypto_headers"]]
+ , "private-hdrs": ["fips_fragments", "crypto_internal_headers"]
+ , "srcs": ["crypto_sources", "crypto_sources_asm"]
+ , "pure C": ["YES"]
+ }
+, "ssl-lib":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["ssl"]
+ , "hdrs": [["./", "src/include/openssl", "ssl_headers"]]
+ , "private-hdrs": ["ssl_internal_headers", "crypto_internal_headers"]
+ , "srcs": ["ssl_sources"]
+ , "deps": ["crypto-lib"]
+ }
+, "crypto_internal_headers":
+ { "type": "install"
+ , "deps":
+ [ "src/crypto/asn1/asn1_locl.h"
+ , "src/crypto/bio/internal.h"
+ , "src/crypto/bytestring/internal.h"
+ , "src/crypto/chacha/internal.h"
+ , "src/crypto/cipher_extra/internal.h"
+ , "src/crypto/conf/conf_def.h"
+ , "src/crypto/conf/internal.h"
+ , "src/crypto/cpu-arm-linux.h"
+ , "src/crypto/curve25519/curve25519_tables.h"
+ , "src/crypto/curve25519/internal.h"
+ , "src/crypto/ec_extra/internal.h"
+ , "src/crypto/err/internal.h"
+ , "src/crypto/evp/internal.h"
+ , "src/crypto/fipsmodule/aes/internal.h"
+ , "src/crypto/fipsmodule/bn/internal.h"
+ , "src/crypto/fipsmodule/bn/rsaz_exp.h"
+ , "src/crypto/fipsmodule/cipher/internal.h"
+ , "src/crypto/fipsmodule/delocate.h"
+ , "src/crypto/fipsmodule/des/internal.h"
+ , "src/crypto/fipsmodule/digest/internal.h"
+ , "src/crypto/fipsmodule/digest/md32_common.h"
+ , "src/crypto/fipsmodule/ec/internal.h"
+ , "src/crypto/fipsmodule/ec/p256-x86_64-table.h"
+ , "src/crypto/fipsmodule/ec/p256-x86_64.h"
+ , "src/crypto/fipsmodule/ec/p256_table.h"
+ , "src/crypto/fipsmodule/md5/internal.h"
+ , "src/crypto/fipsmodule/modes/internal.h"
+ , "src/crypto/fipsmodule/rand/fork_detect.h"
+ , "src/crypto/fipsmodule/rand/getrandom_fillin.h"
+ , "src/crypto/fipsmodule/rand/internal.h"
+ , "src/crypto/fipsmodule/rsa/internal.h"
+ , "src/crypto/fipsmodule/sha/internal.h"
+ , "src/crypto/fipsmodule/tls/internal.h"
+ , "src/crypto/hrss/internal.h"
+ , "src/crypto/internal.h"
+ , "src/crypto/obj/obj_dat.h"
+ , "src/crypto/pkcs7/internal.h"
+ , "src/crypto/pkcs8/internal.h"
+ , "src/crypto/poly1305/internal.h"
+ , "src/crypto/pool/internal.h"
+ , "src/crypto/trust_token/internal.h"
+ , "src/crypto/x509/charmap.h"
+ , "src/crypto/x509/internal.h"
+ , "src/crypto/x509/vpm_int.h"
+ , "src/crypto/x509v3/ext_dat.h"
+ , "src/crypto/x509v3/internal.h"
+ , "src/crypto/x509v3/pcy_int.h"
+ , "src/third_party/fiat/curve25519_32.h"
+ , "src/third_party/fiat/curve25519_64.h"
+ , "src/third_party/fiat/p256_32.h"
+ , "src/third_party/fiat/p256_64.h"
+ ]
+ }
+, "crypto_sources":
+ { "type": "install"
+ , "deps":
+ [ "err_data.c"
+ , "src/crypto/asn1/a_bitstr.c"
+ , "src/crypto/asn1/a_bool.c"
+ , "src/crypto/asn1/a_d2i_fp.c"
+ , "src/crypto/asn1/a_dup.c"
+ , "src/crypto/asn1/a_enum.c"
+ , "src/crypto/asn1/a_gentm.c"
+ , "src/crypto/asn1/a_i2d_fp.c"
+ , "src/crypto/asn1/a_int.c"
+ , "src/crypto/asn1/a_mbstr.c"
+ , "src/crypto/asn1/a_object.c"
+ , "src/crypto/asn1/a_octet.c"
+ , "src/crypto/asn1/a_print.c"
+ , "src/crypto/asn1/a_strnid.c"
+ , "src/crypto/asn1/a_time.c"
+ , "src/crypto/asn1/a_type.c"
+ , "src/crypto/asn1/a_utctm.c"
+ , "src/crypto/asn1/a_utf8.c"
+ , "src/crypto/asn1/asn1_lib.c"
+ , "src/crypto/asn1/asn1_par.c"
+ , "src/crypto/asn1/asn_pack.c"
+ , "src/crypto/asn1/f_enum.c"
+ , "src/crypto/asn1/f_int.c"
+ , "src/crypto/asn1/f_string.c"
+ , "src/crypto/asn1/tasn_dec.c"
+ , "src/crypto/asn1/tasn_enc.c"
+ , "src/crypto/asn1/tasn_fre.c"
+ , "src/crypto/asn1/tasn_new.c"
+ , "src/crypto/asn1/tasn_typ.c"
+ , "src/crypto/asn1/tasn_utl.c"
+ , "src/crypto/asn1/time_support.c"
+ , "src/crypto/base64/base64.c"
+ , "src/crypto/bio/bio.c"
+ , "src/crypto/bio/bio_mem.c"
+ , "src/crypto/bio/connect.c"
+ , "src/crypto/bio/fd.c"
+ , "src/crypto/bio/file.c"
+ , "src/crypto/bio/hexdump.c"
+ , "src/crypto/bio/pair.c"
+ , "src/crypto/bio/printf.c"
+ , "src/crypto/bio/socket.c"
+ , "src/crypto/bio/socket_helper.c"
+ , "src/crypto/bn_extra/bn_asn1.c"
+ , "src/crypto/bn_extra/convert.c"
+ , "src/crypto/buf/buf.c"
+ , "src/crypto/bytestring/asn1_compat.c"
+ , "src/crypto/bytestring/ber.c"
+ , "src/crypto/bytestring/cbb.c"
+ , "src/crypto/bytestring/cbs.c"
+ , "src/crypto/bytestring/unicode.c"
+ , "src/crypto/chacha/chacha.c"
+ , "src/crypto/cipher_extra/cipher_extra.c"
+ , "src/crypto/cipher_extra/derive_key.c"
+ , "src/crypto/cipher_extra/e_aesccm.c"
+ , "src/crypto/cipher_extra/e_aesctrhmac.c"
+ , "src/crypto/cipher_extra/e_aesgcmsiv.c"
+ , "src/crypto/cipher_extra/e_chacha20poly1305.c"
+ , "src/crypto/cipher_extra/e_null.c"
+ , "src/crypto/cipher_extra/e_rc2.c"
+ , "src/crypto/cipher_extra/e_rc4.c"
+ , "src/crypto/cipher_extra/e_tls.c"
+ , "src/crypto/cipher_extra/tls_cbc.c"
+ , "src/crypto/cmac/cmac.c"
+ , "src/crypto/conf/conf.c"
+ , "src/crypto/cpu-aarch64-fuchsia.c"
+ , "src/crypto/cpu-aarch64-linux.c"
+ , "src/crypto/cpu-arm-linux.c"
+ , "src/crypto/cpu-arm.c"
+ , "src/crypto/cpu-intel.c"
+ , "src/crypto/cpu-ppc64le.c"
+ , "src/crypto/crypto.c"
+ , "src/crypto/curve25519/curve25519.c"
+ , "src/crypto/curve25519/spake25519.c"
+ , "src/crypto/dh/check.c"
+ , "src/crypto/dh/dh.c"
+ , "src/crypto/dh/dh_asn1.c"
+ , "src/crypto/dh/params.c"
+ , "src/crypto/digest_extra/digest_extra.c"
+ , "src/crypto/dsa/dsa.c"
+ , "src/crypto/dsa/dsa_asn1.c"
+ , "src/crypto/ec_extra/ec_asn1.c"
+ , "src/crypto/ec_extra/ec_derive.c"
+ , "src/crypto/ec_extra/hash_to_curve.c"
+ , "src/crypto/ecdh_extra/ecdh_extra.c"
+ , "src/crypto/ecdsa_extra/ecdsa_asn1.c"
+ , "src/crypto/engine/engine.c"
+ , "src/crypto/err/err.c"
+ , "src/crypto/evp/digestsign.c"
+ , "src/crypto/evp/evp.c"
+ , "src/crypto/evp/evp_asn1.c"
+ , "src/crypto/evp/evp_ctx.c"
+ , "src/crypto/evp/p_dsa_asn1.c"
+ , "src/crypto/evp/p_ec.c"
+ , "src/crypto/evp/p_ec_asn1.c"
+ , "src/crypto/evp/p_ed25519.c"
+ , "src/crypto/evp/p_ed25519_asn1.c"
+ , "src/crypto/evp/p_rsa.c"
+ , "src/crypto/evp/p_rsa_asn1.c"
+ , "src/crypto/evp/p_x25519.c"
+ , "src/crypto/evp/p_x25519_asn1.c"
+ , "src/crypto/evp/pbkdf.c"
+ , "src/crypto/evp/print.c"
+ , "src/crypto/evp/scrypt.c"
+ , "src/crypto/evp/sign.c"
+ , "src/crypto/ex_data.c"
+ , "src/crypto/fipsmodule/bcm.c"
+ , "src/crypto/fipsmodule/fips_shared_support.c"
+ , "src/crypto/fipsmodule/is_fips.c"
+ , "src/crypto/hkdf/hkdf.c"
+ , "src/crypto/hrss/hrss.c"
+ , "src/crypto/lhash/lhash.c"
+ , "src/crypto/mem.c"
+ , "src/crypto/obj/obj.c"
+ , "src/crypto/obj/obj_xref.c"
+ , "src/crypto/pem/pem_all.c"
+ , "src/crypto/pem/pem_info.c"
+ , "src/crypto/pem/pem_lib.c"
+ , "src/crypto/pem/pem_oth.c"
+ , "src/crypto/pem/pem_pk8.c"
+ , "src/crypto/pem/pem_pkey.c"
+ , "src/crypto/pem/pem_x509.c"
+ , "src/crypto/pem/pem_xaux.c"
+ , "src/crypto/pkcs7/pkcs7.c"
+ , "src/crypto/pkcs7/pkcs7_x509.c"
+ , "src/crypto/pkcs8/p5_pbev2.c"
+ , "src/crypto/pkcs8/pkcs8.c"
+ , "src/crypto/pkcs8/pkcs8_x509.c"
+ , "src/crypto/poly1305/poly1305.c"
+ , "src/crypto/poly1305/poly1305_arm.c"
+ , "src/crypto/poly1305/poly1305_vec.c"
+ , "src/crypto/pool/pool.c"
+ , "src/crypto/rand_extra/deterministic.c"
+ , "src/crypto/rand_extra/forkunsafe.c"
+ , "src/crypto/rand_extra/fuchsia.c"
+ , "src/crypto/rand_extra/rand_extra.c"
+ , "src/crypto/rand_extra/windows.c"
+ , "src/crypto/rc4/rc4.c"
+ , "src/crypto/refcount_c11.c"
+ , "src/crypto/refcount_lock.c"
+ , "src/crypto/rsa_extra/rsa_asn1.c"
+ , "src/crypto/rsa_extra/rsa_print.c"
+ , "src/crypto/siphash/siphash.c"
+ , "src/crypto/stack/stack.c"
+ , "src/crypto/thread.c"
+ , "src/crypto/thread_none.c"
+ , "src/crypto/thread_pthread.c"
+ , "src/crypto/thread_win.c"
+ , "src/crypto/trust_token/pmbtoken.c"
+ , "src/crypto/trust_token/trust_token.c"
+ , "src/crypto/x509/a_digest.c"
+ , "src/crypto/x509/a_sign.c"
+ , "src/crypto/x509/a_strex.c"
+ , "src/crypto/x509/a_verify.c"
+ , "src/crypto/x509/algorithm.c"
+ , "src/crypto/x509/asn1_gen.c"
+ , "src/crypto/x509/by_dir.c"
+ , "src/crypto/x509/by_file.c"
+ , "src/crypto/x509/i2d_pr.c"
+ , "src/crypto/x509/rsa_pss.c"
+ , "src/crypto/x509/t_crl.c"
+ , "src/crypto/x509/t_req.c"
+ , "src/crypto/x509/t_x509.c"
+ , "src/crypto/x509/t_x509a.c"
+ , "src/crypto/x509/x509.c"
+ , "src/crypto/x509/x509_att.c"
+ , "src/crypto/x509/x509_cmp.c"
+ , "src/crypto/x509/x509_d2.c"
+ , "src/crypto/x509/x509_def.c"
+ , "src/crypto/x509/x509_ext.c"
+ , "src/crypto/x509/x509_lu.c"
+ , "src/crypto/x509/x509_obj.c"
+ , "src/crypto/x509/x509_r2x.c"
+ , "src/crypto/x509/x509_req.c"
+ , "src/crypto/x509/x509_set.c"
+ , "src/crypto/x509/x509_trs.c"
+ , "src/crypto/x509/x509_txt.c"
+ , "src/crypto/x509/x509_v3.c"
+ , "src/crypto/x509/x509_vfy.c"
+ , "src/crypto/x509/x509_vpm.c"
+ , "src/crypto/x509/x509cset.c"
+ , "src/crypto/x509/x509name.c"
+ , "src/crypto/x509/x509rset.c"
+ , "src/crypto/x509/x509spki.c"
+ , "src/crypto/x509/x_algor.c"
+ , "src/crypto/x509/x_all.c"
+ , "src/crypto/x509/x_attrib.c"
+ , "src/crypto/x509/x_crl.c"
+ , "src/crypto/x509/x_exten.c"
+ , "src/crypto/x509/x_info.c"
+ , "src/crypto/x509/x_name.c"
+ , "src/crypto/x509/x_pkey.c"
+ , "src/crypto/x509/x_pubkey.c"
+ , "src/crypto/x509/x_req.c"
+ , "src/crypto/x509/x_sig.c"
+ , "src/crypto/x509/x_spki.c"
+ , "src/crypto/x509/x_val.c"
+ , "src/crypto/x509/x_x509.c"
+ , "src/crypto/x509/x_x509a.c"
+ , "src/crypto/x509v3/pcy_cache.c"
+ , "src/crypto/x509v3/pcy_data.c"
+ , "src/crypto/x509v3/pcy_lib.c"
+ , "src/crypto/x509v3/pcy_map.c"
+ , "src/crypto/x509v3/pcy_node.c"
+ , "src/crypto/x509v3/pcy_tree.c"
+ , "src/crypto/x509v3/v3_akey.c"
+ , "src/crypto/x509v3/v3_akeya.c"
+ , "src/crypto/x509v3/v3_alt.c"
+ , "src/crypto/x509v3/v3_bcons.c"
+ , "src/crypto/x509v3/v3_bitst.c"
+ , "src/crypto/x509v3/v3_conf.c"
+ , "src/crypto/x509v3/v3_cpols.c"
+ , "src/crypto/x509v3/v3_crld.c"
+ , "src/crypto/x509v3/v3_enum.c"
+ , "src/crypto/x509v3/v3_extku.c"
+ , "src/crypto/x509v3/v3_genn.c"
+ , "src/crypto/x509v3/v3_ia5.c"
+ , "src/crypto/x509v3/v3_info.c"
+ , "src/crypto/x509v3/v3_int.c"
+ , "src/crypto/x509v3/v3_lib.c"
+ , "src/crypto/x509v3/v3_ncons.c"
+ , "src/crypto/x509v3/v3_ocsp.c"
+ , "src/crypto/x509v3/v3_pci.c"
+ , "src/crypto/x509v3/v3_pcia.c"
+ , "src/crypto/x509v3/v3_pcons.c"
+ , "src/crypto/x509v3/v3_pku.c"
+ , "src/crypto/x509v3/v3_pmaps.c"
+ , "src/crypto/x509v3/v3_prn.c"
+ , "src/crypto/x509v3/v3_purp.c"
+ , "src/crypto/x509v3/v3_skey.c"
+ , "src/crypto/x509v3/v3_sxnet.c"
+ , "src/crypto/x509v3/v3_utl.c"
+ ]
+ }
+, "fips_fragments":
+ { "type": "install"
+ , "deps":
+ [ "src/crypto/fipsmodule/aes/aes.c"
+ , "src/crypto/fipsmodule/aes/aes_nohw.c"
+ , "src/crypto/fipsmodule/aes/key_wrap.c"
+ , "src/crypto/fipsmodule/aes/mode_wrappers.c"
+ , "src/crypto/fipsmodule/bn/add.c"
+ , "src/crypto/fipsmodule/bn/asm/x86_64-gcc.c"
+ , "src/crypto/fipsmodule/bn/bn.c"
+ , "src/crypto/fipsmodule/bn/bytes.c"
+ , "src/crypto/fipsmodule/bn/cmp.c"
+ , "src/crypto/fipsmodule/bn/ctx.c"
+ , "src/crypto/fipsmodule/bn/div.c"
+ , "src/crypto/fipsmodule/bn/div_extra.c"
+ , "src/crypto/fipsmodule/bn/exponentiation.c"
+ , "src/crypto/fipsmodule/bn/gcd.c"
+ , "src/crypto/fipsmodule/bn/gcd_extra.c"
+ , "src/crypto/fipsmodule/bn/generic.c"
+ , "src/crypto/fipsmodule/bn/jacobi.c"
+ , "src/crypto/fipsmodule/bn/montgomery.c"
+ , "src/crypto/fipsmodule/bn/montgomery_inv.c"
+ , "src/crypto/fipsmodule/bn/mul.c"
+ , "src/crypto/fipsmodule/bn/prime.c"
+ , "src/crypto/fipsmodule/bn/random.c"
+ , "src/crypto/fipsmodule/bn/rsaz_exp.c"
+ , "src/crypto/fipsmodule/bn/shift.c"
+ , "src/crypto/fipsmodule/bn/sqrt.c"
+ , "src/crypto/fipsmodule/cipher/aead.c"
+ , "src/crypto/fipsmodule/cipher/cipher.c"
+ , "src/crypto/fipsmodule/cipher/e_aes.c"
+ , "src/crypto/fipsmodule/cipher/e_des.c"
+ , "src/crypto/fipsmodule/des/des.c"
+ , "src/crypto/fipsmodule/digest/digest.c"
+ , "src/crypto/fipsmodule/digest/digests.c"
+ , "src/crypto/fipsmodule/ec/ec.c"
+ , "src/crypto/fipsmodule/ec/ec_key.c"
+ , "src/crypto/fipsmodule/ec/ec_montgomery.c"
+ , "src/crypto/fipsmodule/ec/felem.c"
+ , "src/crypto/fipsmodule/ec/oct.c"
+ , "src/crypto/fipsmodule/ec/p224-64.c"
+ , "src/crypto/fipsmodule/ec/p256-x86_64.c"
+ , "src/crypto/fipsmodule/ec/p256.c"
+ , "src/crypto/fipsmodule/ec/scalar.c"
+ , "src/crypto/fipsmodule/ec/simple.c"
+ , "src/crypto/fipsmodule/ec/simple_mul.c"
+ , "src/crypto/fipsmodule/ec/util.c"
+ , "src/crypto/fipsmodule/ec/wnaf.c"
+ , "src/crypto/fipsmodule/ecdh/ecdh.c"
+ , "src/crypto/fipsmodule/ecdsa/ecdsa.c"
+ , "src/crypto/fipsmodule/hmac/hmac.c"
+ , "src/crypto/fipsmodule/md4/md4.c"
+ , "src/crypto/fipsmodule/md5/md5.c"
+ , "src/crypto/fipsmodule/modes/cbc.c"
+ , "src/crypto/fipsmodule/modes/cfb.c"
+ , "src/crypto/fipsmodule/modes/ctr.c"
+ , "src/crypto/fipsmodule/modes/gcm.c"
+ , "src/crypto/fipsmodule/modes/gcm_nohw.c"
+ , "src/crypto/fipsmodule/modes/ofb.c"
+ , "src/crypto/fipsmodule/modes/polyval.c"
+ , "src/crypto/fipsmodule/rand/ctrdrbg.c"
+ , "src/crypto/fipsmodule/rand/fork_detect.c"
+ , "src/crypto/fipsmodule/rand/rand.c"
+ , "src/crypto/fipsmodule/rand/urandom.c"
+ , "src/crypto/fipsmodule/rsa/blinding.c"
+ , "src/crypto/fipsmodule/rsa/padding.c"
+ , "src/crypto/fipsmodule/rsa/rsa.c"
+ , "src/crypto/fipsmodule/rsa/rsa_impl.c"
+ , "src/crypto/fipsmodule/self_check/self_check.c"
+ , "src/crypto/fipsmodule/sha/sha1-altivec.c"
+ , "src/crypto/fipsmodule/sha/sha1.c"
+ , "src/crypto/fipsmodule/sha/sha256.c"
+ , "src/crypto/fipsmodule/sha/sha512.c"
+ , "src/crypto/fipsmodule/tls/kdf.c"
+ ]
+ }
+, "crypto_sources_ios_aarch64":
+ { "type": "install"
+ , "deps":
+ [ "ios-aarch64/crypto/chacha/chacha-armv8.S"
+ , "ios-aarch64/crypto/fipsmodule/aesv8-armx64.S"
+ , "ios-aarch64/crypto/fipsmodule/armv8-mont.S"
+ , "ios-aarch64/crypto/fipsmodule/ghash-neon-armv8.S"
+ , "ios-aarch64/crypto/fipsmodule/ghashv8-armx64.S"
+ , "ios-aarch64/crypto/fipsmodule/sha1-armv8.S"
+ , "ios-aarch64/crypto/fipsmodule/sha256-armv8.S"
+ , "ios-aarch64/crypto/fipsmodule/sha512-armv8.S"
+ , "ios-aarch64/crypto/fipsmodule/vpaes-armv8.S"
+ , "ios-aarch64/crypto/test/trampoline-armv8.S"
+ ]
+ }
+, "crypto_sources_ios_arm":
+ { "type": "install"
+ , "deps":
+ [ "ios-arm/crypto/chacha/chacha-armv4.S"
+ , "ios-arm/crypto/fipsmodule/aesv8-armx32.S"
+ , "ios-arm/crypto/fipsmodule/armv4-mont.S"
+ , "ios-arm/crypto/fipsmodule/bsaes-armv7.S"
+ , "ios-arm/crypto/fipsmodule/ghash-armv4.S"
+ , "ios-arm/crypto/fipsmodule/ghashv8-armx32.S"
+ , "ios-arm/crypto/fipsmodule/sha1-armv4-large.S"
+ , "ios-arm/crypto/fipsmodule/sha256-armv4.S"
+ , "ios-arm/crypto/fipsmodule/sha512-armv4.S"
+ , "ios-arm/crypto/fipsmodule/vpaes-armv7.S"
+ , "ios-arm/crypto/test/trampoline-armv4.S"
+ ]
+ }
+, "crypto_sources_linux_aarch64":
+ { "type": "install"
+ , "deps":
+ [ "linux-aarch64/crypto/chacha/chacha-armv8.S"
+ , "linux-aarch64/crypto/fipsmodule/aesv8-armx64.S"
+ , "linux-aarch64/crypto/fipsmodule/armv8-mont.S"
+ , "linux-aarch64/crypto/fipsmodule/ghash-neon-armv8.S"
+ , "linux-aarch64/crypto/fipsmodule/ghashv8-armx64.S"
+ , "linux-aarch64/crypto/fipsmodule/sha1-armv8.S"
+ , "linux-aarch64/crypto/fipsmodule/sha256-armv8.S"
+ , "linux-aarch64/crypto/fipsmodule/sha512-armv8.S"
+ , "linux-aarch64/crypto/fipsmodule/vpaes-armv8.S"
+ , "linux-aarch64/crypto/test/trampoline-armv8.S"
+ ]
+ }
+, "crypto_sources_linux_arm":
+ { "type": "install"
+ , "deps":
+ [ "linux-arm/crypto/chacha/chacha-armv4.S"
+ , "linux-arm/crypto/fipsmodule/aesv8-armx32.S"
+ , "linux-arm/crypto/fipsmodule/armv4-mont.S"
+ , "linux-arm/crypto/fipsmodule/bsaes-armv7.S"
+ , "linux-arm/crypto/fipsmodule/ghash-armv4.S"
+ , "linux-arm/crypto/fipsmodule/ghashv8-armx32.S"
+ , "linux-arm/crypto/fipsmodule/sha1-armv4-large.S"
+ , "linux-arm/crypto/fipsmodule/sha256-armv4.S"
+ , "linux-arm/crypto/fipsmodule/sha512-armv4.S"
+ , "linux-arm/crypto/fipsmodule/vpaes-armv7.S"
+ , "linux-arm/crypto/test/trampoline-armv4.S"
+ , "src/crypto/curve25519/asm/x25519-asm-arm.S"
+ , "src/crypto/poly1305/poly1305_arm_asm.S"
+ ]
+ }
+, "crypto_sources_linux_ppc64le":
+ { "type": "install"
+ , "deps":
+ [ "linux-ppc64le/crypto/fipsmodule/aesp8-ppc.S"
+ , "linux-ppc64le/crypto/fipsmodule/ghashp8-ppc.S"
+ , "linux-ppc64le/crypto/test/trampoline-ppc.S"
+ ]
+ }
+, "crypto_sources_linux_x86":
+ { "type": "install"
+ , "deps":
+ [ "linux-x86/crypto/chacha/chacha-x86.S"
+ , "linux-x86/crypto/fipsmodule/aesni-x86.S"
+ , "linux-x86/crypto/fipsmodule/bn-586.S"
+ , "linux-x86/crypto/fipsmodule/co-586.S"
+ , "linux-x86/crypto/fipsmodule/ghash-ssse3-x86.S"
+ , "linux-x86/crypto/fipsmodule/ghash-x86.S"
+ , "linux-x86/crypto/fipsmodule/md5-586.S"
+ , "linux-x86/crypto/fipsmodule/sha1-586.S"
+ , "linux-x86/crypto/fipsmodule/sha256-586.S"
+ , "linux-x86/crypto/fipsmodule/sha512-586.S"
+ , "linux-x86/crypto/fipsmodule/vpaes-x86.S"
+ , "linux-x86/crypto/fipsmodule/x86-mont.S"
+ , "linux-x86/crypto/test/trampoline-x86.S"
+ ]
+ }
+, "crypto_sources_linux_x86_64":
+ { "type": "install"
+ , "deps":
+ [ "linux-x86_64/crypto/chacha/chacha-x86_64.S"
+ , "linux-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64.S"
+ , "linux-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64.S"
+ , "linux-x86_64/crypto/fipsmodule/aesni-gcm-x86_64.S"
+ , "linux-x86_64/crypto/fipsmodule/aesni-x86_64.S"
+ , "linux-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64.S"
+ , "linux-x86_64/crypto/fipsmodule/ghash-x86_64.S"
+ , "linux-x86_64/crypto/fipsmodule/md5-x86_64.S"
+ , "linux-x86_64/crypto/fipsmodule/p256-x86_64-asm.S"
+ , "linux-x86_64/crypto/fipsmodule/p256_beeu-x86_64-asm.S"
+ , "linux-x86_64/crypto/fipsmodule/rdrand-x86_64.S"
+ , "linux-x86_64/crypto/fipsmodule/rsaz-avx2.S"
+ , "linux-x86_64/crypto/fipsmodule/sha1-x86_64.S"
+ , "linux-x86_64/crypto/fipsmodule/sha256-x86_64.S"
+ , "linux-x86_64/crypto/fipsmodule/sha512-x86_64.S"
+ , "linux-x86_64/crypto/fipsmodule/vpaes-x86_64.S"
+ , "linux-x86_64/crypto/fipsmodule/x86_64-mont.S"
+ , "linux-x86_64/crypto/fipsmodule/x86_64-mont5.S"
+ , "linux-x86_64/crypto/test/trampoline-x86_64.S"
+ , "src/crypto/hrss/asm/poly_rq_mul.S"
+ ]
+ }
+, "crypto_sources_mac_x86":
+ { "type": "install"
+ , "deps":
+ [ "mac-x86/crypto/chacha/chacha-x86.S"
+ , "mac-x86/crypto/fipsmodule/aesni-x86.S"
+ , "mac-x86/crypto/fipsmodule/bn-586.S"
+ , "mac-x86/crypto/fipsmodule/co-586.S"
+ , "mac-x86/crypto/fipsmodule/ghash-ssse3-x86.S"
+ , "mac-x86/crypto/fipsmodule/ghash-x86.S"
+ , "mac-x86/crypto/fipsmodule/md5-586.S"
+ , "mac-x86/crypto/fipsmodule/sha1-586.S"
+ , "mac-x86/crypto/fipsmodule/sha256-586.S"
+ , "mac-x86/crypto/fipsmodule/sha512-586.S"
+ , "mac-x86/crypto/fipsmodule/vpaes-x86.S"
+ , "mac-x86/crypto/fipsmodule/x86-mont.S"
+ , "mac-x86/crypto/test/trampoline-x86.S"
+ ]
+ }
+, "crypto_sources_mac_x86_64":
+ { "type": "install"
+ , "deps":
+ [ "mac-x86_64/crypto/chacha/chacha-x86_64.S"
+ , "mac-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64.S"
+ , "mac-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64.S"
+ , "mac-x86_64/crypto/fipsmodule/aesni-gcm-x86_64.S"
+ , "mac-x86_64/crypto/fipsmodule/aesni-x86_64.S"
+ , "mac-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64.S"
+ , "mac-x86_64/crypto/fipsmodule/ghash-x86_64.S"
+ , "mac-x86_64/crypto/fipsmodule/md5-x86_64.S"
+ , "mac-x86_64/crypto/fipsmodule/p256-x86_64-asm.S"
+ , "mac-x86_64/crypto/fipsmodule/p256_beeu-x86_64-asm.S"
+ , "mac-x86_64/crypto/fipsmodule/rdrand-x86_64.S"
+ , "mac-x86_64/crypto/fipsmodule/rsaz-avx2.S"
+ , "mac-x86_64/crypto/fipsmodule/sha1-x86_64.S"
+ , "mac-x86_64/crypto/fipsmodule/sha256-x86_64.S"
+ , "mac-x86_64/crypto/fipsmodule/sha512-x86_64.S"
+ , "mac-x86_64/crypto/fipsmodule/vpaes-x86_64.S"
+ , "mac-x86_64/crypto/fipsmodule/x86_64-mont.S"
+ , "mac-x86_64/crypto/fipsmodule/x86_64-mont5.S"
+ , "mac-x86_64/crypto/test/trampoline-x86_64.S"
+ ]
+ }
+, "crypto_sources_win_x86":
+ { "type": "install"
+ , "deps":
+ [ "win-x86/crypto/chacha/chacha-x86.asm"
+ , "win-x86/crypto/fipsmodule/aesni-x86.asm"
+ , "win-x86/crypto/fipsmodule/bn-586.asm"
+ , "win-x86/crypto/fipsmodule/co-586.asm"
+ , "win-x86/crypto/fipsmodule/ghash-ssse3-x86.asm"
+ , "win-x86/crypto/fipsmodule/ghash-x86.asm"
+ , "win-x86/crypto/fipsmodule/md5-586.asm"
+ , "win-x86/crypto/fipsmodule/sha1-586.asm"
+ , "win-x86/crypto/fipsmodule/sha256-586.asm"
+ , "win-x86/crypto/fipsmodule/sha512-586.asm"
+ , "win-x86/crypto/fipsmodule/vpaes-x86.asm"
+ , "win-x86/crypto/fipsmodule/x86-mont.asm"
+ , "win-x86/crypto/test/trampoline-x86.asm"
+ ]
+ }
+, "crypto_sources_win_x86_64":
+ { "type": "install"
+ , "deps":
+ [ "win-x86_64/crypto/chacha/chacha-x86_64.asm"
+ , "win-x86_64/crypto/cipher_extra/aes128gcmsiv-x86_64.asm"
+ , "win-x86_64/crypto/cipher_extra/chacha20_poly1305_x86_64.asm"
+ , "win-x86_64/crypto/fipsmodule/aesni-gcm-x86_64.asm"
+ , "win-x86_64/crypto/fipsmodule/aesni-x86_64.asm"
+ , "win-x86_64/crypto/fipsmodule/ghash-ssse3-x86_64.asm"
+ , "win-x86_64/crypto/fipsmodule/ghash-x86_64.asm"
+ , "win-x86_64/crypto/fipsmodule/md5-x86_64.asm"
+ , "win-x86_64/crypto/fipsmodule/p256-x86_64-asm.asm"
+ , "win-x86_64/crypto/fipsmodule/p256_beeu-x86_64-asm.asm"
+ , "win-x86_64/crypto/fipsmodule/rdrand-x86_64.asm"
+ , "win-x86_64/crypto/fipsmodule/rsaz-avx2.asm"
+ , "win-x86_64/crypto/fipsmodule/sha1-x86_64.asm"
+ , "win-x86_64/crypto/fipsmodule/sha256-x86_64.asm"
+ , "win-x86_64/crypto/fipsmodule/sha512-x86_64.asm"
+ , "win-x86_64/crypto/fipsmodule/vpaes-x86_64.asm"
+ , "win-x86_64/crypto/fipsmodule/x86_64-mont.asm"
+ , "win-x86_64/crypto/fipsmodule/x86_64-mont5.asm"
+ , "win-x86_64/crypto/test/trampoline-x86_64.asm"
+ ]
+ }
+, "crypto_sources_asm":
+ { "type": "install"
+ , "arguments_config": ["OS", "ARCH", "TARGET_ARCH"]
+ , "deps":
+ { "type": "let*"
+ , "bindings":
+ [ [ "PLATFORM"
+ , { "type": "join"
+ , "separator": "_"
+ , "$1":
+ [ {"type": "var", "name": "OS"}
+ , { "type": "var"
+ , "name": "TARGET_ARCH"
+ , "default": {"type": "var", "name": "ARCH"}
+ }
+ ]
+ }
+ ]
+ ]
+ , "body":
+ { "type": "cond"
+ , "cond":
+ [ [ { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "linux_ppc64le"
+ }
+ , ["crypto_sources_linux_ppc64le"]
+ ]
+ , [ { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "linux_x86_64"
+ }
+ , ["crypto_sources_linux_x86_64"]
+ ]
+ , [ { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "mac_x86_64"
+ }
+ , ["crypto_sources_mac_x86_64"]
+ ]
+ ]
+ }
+ }
+ }
+, "ssl_sources":
+ { "type": "install"
+ , "deps":
+ [ "src/ssl/bio_ssl.cc"
+ , "src/ssl/d1_both.cc"
+ , "src/ssl/d1_lib.cc"
+ , "src/ssl/d1_pkt.cc"
+ , "src/ssl/d1_srtp.cc"
+ , "src/ssl/dtls_method.cc"
+ , "src/ssl/dtls_record.cc"
+ , "src/ssl/handoff.cc"
+ , "src/ssl/handshake.cc"
+ , "src/ssl/handshake_client.cc"
+ , "src/ssl/handshake_server.cc"
+ , "src/ssl/s3_both.cc"
+ , "src/ssl/s3_lib.cc"
+ , "src/ssl/s3_pkt.cc"
+ , "src/ssl/ssl_aead_ctx.cc"
+ , "src/ssl/ssl_asn1.cc"
+ , "src/ssl/ssl_buffer.cc"
+ , "src/ssl/ssl_cert.cc"
+ , "src/ssl/ssl_cipher.cc"
+ , "src/ssl/ssl_file.cc"
+ , "src/ssl/ssl_key_share.cc"
+ , "src/ssl/ssl_lib.cc"
+ , "src/ssl/ssl_privkey.cc"
+ , "src/ssl/ssl_session.cc"
+ , "src/ssl/ssl_stat.cc"
+ , "src/ssl/ssl_transcript.cc"
+ , "src/ssl/ssl_versions.cc"
+ , "src/ssl/ssl_x509.cc"
+ , "src/ssl/t1_enc.cc"
+ , "src/ssl/t1_lib.cc"
+ , "src/ssl/tls13_both.cc"
+ , "src/ssl/tls13_client.cc"
+ , "src/ssl/tls13_enc.cc"
+ , "src/ssl/tls13_server.cc"
+ , "src/ssl/tls_method.cc"
+ , "src/ssl/tls_record.cc"
+ ]
+ }
+, "ssl_internal_headers": {"type": "install", "deps": ["src/ssl/internal.h"]}
+}
diff --git a/etc/import/TARGETS.cares b/etc/import/TARGETS.cares
new file mode 100644
index 00000000..5b7df47f
--- /dev/null
+++ b/etc/import/TARGETS.cares
@@ -0,0 +1,92 @@
+{ "ares":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["ares"]
+ , "pure C": ["YES"]
+ , "local defines":
+ [ "_GNU_SOURCE"
+ , "_HAS_EXCEPTIONS=0"
+ , "HAVE_CONFIG_H"
+ , "CARES_STATICLIB"
+ ]
+ , "srcs":
+ [ "ares__close_sockets.c"
+ , "ares__get_hostent.c"
+ , "ares__read_line.c"
+ , "ares__timeval.c"
+ , "ares_cancel.c"
+ , "ares_create_query.c"
+ , "ares_data.c"
+ , "ares_destroy.c"
+ , "ares_expand_name.c"
+ , "ares_expand_string.c"
+ , "ares_fds.c"
+ , "ares_free_hostent.c"
+ , "ares_free_string.c"
+ , "ares_getenv.c"
+ , "ares_gethostbyaddr.c"
+ , "ares_gethostbyname.c"
+ , "ares_getnameinfo.c"
+ , "ares_getopt.c"
+ , "ares_getsock.c"
+ , "ares_init.c"
+ , "ares_library_init.c"
+ , "ares_llist.c"
+ , "ares_mkquery.c"
+ , "ares_nowarn.c"
+ , "ares_options.c"
+ , "ares_parse_a_reply.c"
+ , "ares_parse_aaaa_reply.c"
+ , "ares_parse_mx_reply.c"
+ , "ares_parse_naptr_reply.c"
+ , "ares_parse_ns_reply.c"
+ , "ares_parse_ptr_reply.c"
+ , "ares_parse_soa_reply.c"
+ , "ares_parse_srv_reply.c"
+ , "ares_parse_txt_reply.c"
+ , "ares_platform.c"
+ , "ares_process.c"
+ , "ares_query.c"
+ , "ares_search.c"
+ , "ares_send.c"
+ , "ares_strcasecmp.c"
+ , "ares_strdup.c"
+ , "ares_strsplit.c"
+ , "ares_strerror.c"
+ , "ares_timeout.c"
+ , "ares_version.c"
+ , "ares_writev.c"
+ , "bitncmp.c"
+ , "inet_net_pton.c"
+ , "inet_ntop.c"
+ , "windows_port.c"
+ ]
+ , "hdrs":
+ [ "ares.h"
+ , ["@", "grpc", "third_party", "ares_build_h"]
+ , ["@", "grpc", "third_party", "ares_config_h"]
+ , "ares_data.h"
+ , "ares_dns.h"
+ , "ares_getenv.h"
+ , "ares_getopt.h"
+ , "ares_inet_net_pton.h"
+ , "ares_iphlpapi.h"
+ , "ares_ipv6.h"
+ , "ares_library_init.h"
+ , "ares_llist.h"
+ , "ares_nowarn.h"
+ , "ares_platform.h"
+ , "ares_private.h"
+ , "ares_rules.h"
+ , "ares_setup.h"
+ , "ares_strcasecmp.h"
+ , "ares_strdup.h"
+ , "ares_strsplit.h"
+ , "ares_version.h"
+ , "ares_writev.h"
+ , "bitncmp.h"
+ , "config-win32.h"
+ , "nameser.h"
+ , "setup_once.h"
+ ]
+ }
+}
diff --git a/etc/import/TARGETS.catch2 b/etc/import/TARGETS.catch2
new file mode 100644
index 00000000..266386ed
--- /dev/null
+++ b/etc/import/TARGETS.catch2
@@ -0,0 +1,7 @@
+{ "catch2":
+ { "type": ["@", "rules", "CC", "library"]
+ , "hdrs": ["catch.hpp"]
+ , "link external": ["-lpthread"]
+ , "stage": ["catch2"]
+ }
+}
diff --git a/etc/import/TARGETS.cli11 b/etc/import/TARGETS.cli11
new file mode 100644
index 00000000..37f3e96d
--- /dev/null
+++ b/etc/import/TARGETS.cli11
@@ -0,0 +1,5 @@
+{ "cli11":
+ { "type": ["@", "rules", "CC", "library"]
+ , "hdrs": [["./", "include/CLI", "cli11_headers"]]
+ }
+}
diff --git a/etc/import/TARGETS.fmt b/etc/import/TARGETS.fmt
new file mode 100644
index 00000000..a9434440
--- /dev/null
+++ b/etc/import/TARGETS.fmt
@@ -0,0 +1,12 @@
+{ "fmt-lib":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["fmt"]
+ , "srcs": ["src/format.cc", "src/os.cc"]
+ , "hdrs": [["./", "include/fmt", "hdrs"]]
+ }
+, "fmt":
+ { "type": "export"
+ , "target": "fmt-lib"
+ , "flexible_config": ["CXX", "CXXFLAGS", "AR", "ENV"]
+ }
+}
diff --git a/etc/import/TARGETS.git2 b/etc/import/TARGETS.git2
new file mode 100644
index 00000000..bdc7a446
--- /dev/null
+++ b/etc/import/TARGETS.git2
@@ -0,0 +1,26 @@
+{ "git2":
+ { "type": "export"
+ , "target": ["./", "src", "git2internal"]
+ , "flexible_config":
+ [ "OS"
+ , "ARCH"
+ , "TARGET_ARCH"
+ , "CC"
+ , "AR"
+ , "ENV"
+ , "USE_SYSTEM_LIBS"
+ , "DEBUG_POOL"
+ , "ENABLE_TRACE"
+ , "THREADSAFE"
+ , "USE_ICONV"
+ , "USE_NSEC"
+ , "REGEX_BACKEND"
+ , "USE_SSH"
+ , "USE_NTLMCLIENT"
+ , "USE_GSSAPI"
+ , "USE_SHA1"
+ , "USE_HTTPS"
+ , "WINHTTP"
+ ]
+ }
+}
diff --git a/etc/import/TARGETS.google_apis b/etc/import/TARGETS.google_apis
new file mode 100644
index 00000000..fa41ca26
--- /dev/null
+++ b/etc/import/TARGETS.google_apis
@@ -0,0 +1,45 @@
+{ "google_api_http_proto":
+ { "type": ["@", "rules", "proto", "library"]
+ , "name": ["google_api_http_proto"]
+ , "srcs": ["google/api/http.proto"]
+ }
+, "google_api_annotations_proto":
+ { "type": ["@", "rules", "proto", "library"]
+ , "name": ["google_api_annotations_proto"]
+ , "srcs": ["google/api/annotations.proto"]
+ , "deps": ["google_api_http_proto"]
+ }
+, "google_bytestream_proto":
+ { "type": ["@", "rules", "proto", "library"]
+ , "name": ["google_bytestream_proto"]
+ , "service": ["yes"]
+ , "srcs": ["google/bytestream/bytestream.proto"]
+ , "deps": ["google_api_annotations_proto"]
+ }
+, "google_rpc_status_proto":
+ { "type": ["@", "rules", "proto", "library"]
+ , "name": ["google_rpc_status_proto"]
+ , "srcs": ["google/rpc/status.proto"]
+ }
+, "google_longrunning_operations_proto":
+ { "type": ["@", "rules", "proto", "library"]
+ , "name": ["google_longrunning_operations_proto"]
+ , "srcs": ["google/longrunning/operations.proto"]
+ , "deps":
+ [ "google_api_annotations_proto"
+ , "google_api_http_proto"
+ , "google_rpc_status_proto"
+ ]
+ }
+, "google_apis":
+ { "type": ["@", "rules", "CC/proto", "library"]
+ , "name": ["google_apis"]
+ , "srcs":
+ [ "google_api_http_proto"
+ , "google_api_annotations_proto"
+ , "google_bytestream_proto"
+ , "google_rpc_status_proto"
+ , "google_longrunning_operations_proto"
+ ]
+ }
+}
diff --git a/etc/import/TARGETS.grpc b/etc/import/TARGETS.grpc
new file mode 100644
index 00000000..5c4ee009
--- /dev/null
+++ b/etc/import/TARGETS.grpc
@@ -0,0 +1,1601 @@
+{ "grpc++_public_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ ["./", "include/grpc++", "grpc++_public_headers"]
+ , ["./", "include/grpcpp", "grpcpp_public_headers"]
+ ]
+ }
+, "gpr_base":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["gpr_base"]
+ , "srcs":
+ [ "src/core/lib/gpr/alloc.cc"
+ , "src/core/lib/gpr/atm.cc"
+ , "src/core/lib/gpr/cpu_iphone.cc"
+ , "src/core/lib/gpr/cpu_linux.cc"
+ , "src/core/lib/gpr/cpu_posix.cc"
+ , "src/core/lib/gpr/cpu_windows.cc"
+ , "src/core/lib/gpr/env_linux.cc"
+ , "src/core/lib/gpr/env_posix.cc"
+ , "src/core/lib/gpr/env_windows.cc"
+ , "src/core/lib/gpr/log.cc"
+ , "src/core/lib/gpr/log_android.cc"
+ , "src/core/lib/gpr/log_linux.cc"
+ , "src/core/lib/gpr/log_posix.cc"
+ , "src/core/lib/gpr/log_windows.cc"
+ , "src/core/lib/gpr/murmur_hash.cc"
+ , "src/core/lib/gpr/string.cc"
+ , "src/core/lib/gpr/string_posix.cc"
+ , "src/core/lib/gpr/string_util_windows.cc"
+ , "src/core/lib/gpr/string_windows.cc"
+ , "src/core/lib/gpr/sync.cc"
+ , "src/core/lib/gpr/sync_abseil.cc"
+ , "src/core/lib/gpr/sync_posix.cc"
+ , "src/core/lib/gpr/sync_windows.cc"
+ , "src/core/lib/gpr/time.cc"
+ , "src/core/lib/gpr/time_posix.cc"
+ , "src/core/lib/gpr/time_precise.cc"
+ , "src/core/lib/gpr/time_windows.cc"
+ , "src/core/lib/gpr/tls_pthread.cc"
+ , "src/core/lib/gpr/tmpfile_msys.cc"
+ , "src/core/lib/gpr/tmpfile_posix.cc"
+ , "src/core/lib/gpr/tmpfile_windows.cc"
+ , "src/core/lib/gpr/wrap_memcpy.cc"
+ , "src/core/lib/gprpp/arena.cc"
+ , "src/core/lib/gprpp/fork.cc"
+ , "src/core/lib/gprpp/global_config_env.cc"
+ , "src/core/lib/gprpp/host_port.cc"
+ , "src/core/lib/gprpp/mpscq.cc"
+ , "src/core/lib/gprpp/thd_posix.cc"
+ , "src/core/lib/gprpp/thd_windows.cc"
+ , "src/core/lib/profiling/basic_timers.cc"
+ , "src/core/lib/profiling/stap_timers.cc"
+ ]
+ , "hdrs":
+ [ ["./", "include/grpc", "gpr_public_headers"]
+ , "src/core/lib/gpr/alloc.h"
+ , "src/core/lib/gpr/arena.h"
+ , "src/core/lib/gpr/env.h"
+ , "src/core/lib/gpr/murmur_hash.h"
+ , "src/core/lib/gpr/spinlock.h"
+ , "src/core/lib/gpr/string.h"
+ , "src/core/lib/gpr/string_windows.h"
+ , "src/core/lib/gpr/time_precise.h"
+ , "src/core/lib/gpr/tls.h"
+ , "src/core/lib/gpr/tls_gcc.h"
+ , "src/core/lib/gpr/tls_msvc.h"
+ , "src/core/lib/gpr/tls_pthread.h"
+ , "src/core/lib/gpr/tmpfile.h"
+ , "src/core/lib/gpr/useful.h"
+ , "src/core/lib/gprpp/arena.h"
+ , "src/core/lib/gprpp/atomic.h"
+ , "src/core/lib/gprpp/fork.h"
+ , "src/core/lib/gprpp/global_config.h"
+ , "src/core/lib/gprpp/global_config_custom.h"
+ , "src/core/lib/gprpp/global_config_env.h"
+ , "src/core/lib/gprpp/global_config_generic.h"
+ , "src/core/lib/gprpp/host_port.h"
+ , "src/core/lib/gprpp/manual_constructor.h"
+ , "src/core/lib/gprpp/map.h"
+ , "src/core/lib/gprpp/memory.h"
+ , "src/core/lib/gprpp/mpscq.h"
+ , "src/core/lib/gprpp/sync.h"
+ , "src/core/lib/gprpp/thd.h"
+ , "src/core/lib/profiling/timers.h"
+ ]
+ , "deps":
+ [ ["./", "include/grpc", "gpr_codegen"]
+ , ["./", "include/grpc", "grpc_codegen"]
+ , ["@", "absl", "absl/memory", "memory"]
+ , ["@", "absl", "absl/strings", "strings"]
+ , ["@", "absl", "absl/strings", "str_format"]
+ , ["@", "absl", "absl/time", "time"]
+ ]
+ }
+, "gpr":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["gpr"]
+ , "hdrs": [["./", "include/grpc", "gpr_public_headers"]]
+ , "deps": ["gpr_base"]
+ }
+, "atomic":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["atomic"]
+ , "hdrs": ["src/core/lib/gprpp/atomic.h"]
+ , "deps": ["gpr"]
+ }
+, "debug_location":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["debug_location"]
+ , "hdrs": ["src/core/lib/gprpp/debug_location.h"]
+ }
+, "ref_counted_ptr":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["ref_counted_ptr"]
+ , "hdrs": ["src/core/lib/gprpp/ref_counted_ptr.h"]
+ , "deps": ["gpr_base"]
+ }
+, "ref_counted":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["ref_counted"]
+ , "hdrs": ["src/core/lib/gprpp/ref_counted.h"]
+ , "deps":
+ [ "atomic"
+ , "debug_location"
+ , "gpr_base"
+ , "grpc_trace"
+ , "ref_counted_ptr"
+ ]
+ }
+, "orphanable":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["orphanable"]
+ , "hdrs": ["src/core/lib/gprpp/orphanable.h"]
+ , "deps":
+ [ "debug_location"
+ , "gpr_base"
+ , "grpc_trace"
+ , "ref_counted"
+ , "ref_counted_ptr"
+ ]
+ }
+, "eventmanager_libuv":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["eventmanager_libuv"]
+ , "srcs": ["src/core/lib/iomgr/poller/eventmanager_libuv.cc"]
+ , "private-hdrs": ["src/core/lib/iomgr/poller/eventmanager_libuv.h"]
+ , "deps": ["gpr_base"]
+ }
+, "grpc_trace":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_trace"]
+ , "srcs": ["src/core/lib/debug/trace.cc"]
+ , "hdrs":
+ [ ["./", "include/grpc", "grpc_public_headers"]
+ , "src/core/lib/debug/trace.h"
+ ]
+ , "deps": [["./", "include/grpc", "grpc_codegen"], "gpr"]
+ }
+, "grpc_base_c":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_base_c"]
+ , "srcs":
+ [ "src/core/lib/avl/avl.cc"
+ , "src/core/lib/backoff/backoff.cc"
+ , "src/core/lib/channel/channel_args.cc"
+ , "src/core/lib/channel/channel_stack.cc"
+ , "src/core/lib/channel/channel_stack_builder.cc"
+ , "src/core/lib/channel/channel_trace.cc"
+ , "src/core/lib/channel/channelz.cc"
+ , "src/core/lib/channel/channelz_registry.cc"
+ , "src/core/lib/channel/connected_channel.cc"
+ , "src/core/lib/channel/handshaker.cc"
+ , "src/core/lib/channel/handshaker_registry.cc"
+ , "src/core/lib/channel/status_util.cc"
+ , "src/core/lib/compression/compression.cc"
+ , "src/core/lib/compression/compression_args.cc"
+ , "src/core/lib/compression/compression_internal.cc"
+ , "src/core/lib/compression/message_compress.cc"
+ , "src/core/lib/compression/stream_compression.cc"
+ , "src/core/lib/compression/stream_compression_gzip.cc"
+ , "src/core/lib/compression/stream_compression_identity.cc"
+ , "src/core/lib/debug/stats.cc"
+ , "src/core/lib/debug/stats_data.cc"
+ , "src/core/lib/http/format_request.cc"
+ , "src/core/lib/http/httpcli.cc"
+ , "src/core/lib/http/parser.cc"
+ , "src/core/lib/iomgr/buffer_list.cc"
+ , "src/core/lib/iomgr/call_combiner.cc"
+ , "src/core/lib/iomgr/cfstream_handle.cc"
+ , "src/core/lib/iomgr/combiner.cc"
+ , "src/core/lib/iomgr/dualstack_socket_posix.cc"
+ , "src/core/lib/iomgr/endpoint.cc"
+ , "src/core/lib/iomgr/endpoint_cfstream.cc"
+ , "src/core/lib/iomgr/endpoint_pair_posix.cc"
+ , "src/core/lib/iomgr/endpoint_pair_uv.cc"
+ , "src/core/lib/iomgr/endpoint_pair_windows.cc"
+ , "src/core/lib/iomgr/error.cc"
+ , "src/core/lib/iomgr/error_cfstream.cc"
+ , "src/core/lib/iomgr/ev_apple.cc"
+ , "src/core/lib/iomgr/ev_epoll1_linux.cc"
+ , "src/core/lib/iomgr/ev_epollex_linux.cc"
+ , "src/core/lib/iomgr/ev_poll_posix.cc"
+ , "src/core/lib/iomgr/ev_posix.cc"
+ , "src/core/lib/iomgr/ev_windows.cc"
+ , "src/core/lib/iomgr/exec_ctx.cc"
+ , "src/core/lib/iomgr/executor.cc"
+ , "src/core/lib/iomgr/executor/mpmcqueue.cc"
+ , "src/core/lib/iomgr/executor/threadpool.cc"
+ , "src/core/lib/iomgr/fork_posix.cc"
+ , "src/core/lib/iomgr/fork_windows.cc"
+ , "src/core/lib/iomgr/gethostname_fallback.cc"
+ , "src/core/lib/iomgr/gethostname_host_name_max.cc"
+ , "src/core/lib/iomgr/gethostname_sysconf.cc"
+ , "src/core/lib/iomgr/grpc_if_nametoindex_posix.cc"
+ , "src/core/lib/iomgr/grpc_if_nametoindex_unsupported.cc"
+ , "src/core/lib/iomgr/internal_errqueue.cc"
+ , "src/core/lib/iomgr/iocp_windows.cc"
+ , "src/core/lib/iomgr/iomgr.cc"
+ , "src/core/lib/iomgr/iomgr_custom.cc"
+ , "src/core/lib/iomgr/iomgr_internal.cc"
+ , "src/core/lib/iomgr/iomgr_posix.cc"
+ , "src/core/lib/iomgr/iomgr_posix_cfstream.cc"
+ , "src/core/lib/iomgr/iomgr_uv.cc"
+ , "src/core/lib/iomgr/iomgr_windows.cc"
+ , "src/core/lib/iomgr/is_epollexclusive_available.cc"
+ , "src/core/lib/iomgr/load_file.cc"
+ , "src/core/lib/iomgr/lockfree_event.cc"
+ , "src/core/lib/iomgr/polling_entity.cc"
+ , "src/core/lib/iomgr/pollset.cc"
+ , "src/core/lib/iomgr/pollset_custom.cc"
+ , "src/core/lib/iomgr/pollset_set.cc"
+ , "src/core/lib/iomgr/pollset_set_custom.cc"
+ , "src/core/lib/iomgr/pollset_set_windows.cc"
+ , "src/core/lib/iomgr/pollset_uv.cc"
+ , "src/core/lib/iomgr/pollset_windows.cc"
+ , "src/core/lib/iomgr/resolve_address.cc"
+ , "src/core/lib/iomgr/resolve_address_custom.cc"
+ , "src/core/lib/iomgr/resolve_address_posix.cc"
+ , "src/core/lib/iomgr/resolve_address_windows.cc"
+ , "src/core/lib/iomgr/resource_quota.cc"
+ , "src/core/lib/iomgr/sockaddr_utils.cc"
+ , "src/core/lib/iomgr/socket_factory_posix.cc"
+ , "src/core/lib/iomgr/socket_mutator.cc"
+ , "src/core/lib/iomgr/socket_utils_common_posix.cc"
+ , "src/core/lib/iomgr/socket_utils_linux.cc"
+ , "src/core/lib/iomgr/socket_utils_posix.cc"
+ , "src/core/lib/iomgr/socket_utils_uv.cc"
+ , "src/core/lib/iomgr/socket_utils_windows.cc"
+ , "src/core/lib/iomgr/socket_windows.cc"
+ , "src/core/lib/iomgr/tcp_client.cc"
+ , "src/core/lib/iomgr/tcp_client_cfstream.cc"
+ , "src/core/lib/iomgr/tcp_client_custom.cc"
+ , "src/core/lib/iomgr/tcp_client_posix.cc"
+ , "src/core/lib/iomgr/tcp_client_windows.cc"
+ , "src/core/lib/iomgr/tcp_custom.cc"
+ , "src/core/lib/iomgr/tcp_posix.cc"
+ , "src/core/lib/iomgr/tcp_server.cc"
+ , "src/core/lib/iomgr/tcp_server_custom.cc"
+ , "src/core/lib/iomgr/tcp_server_posix.cc"
+ , "src/core/lib/iomgr/tcp_server_utils_posix_common.cc"
+ , "src/core/lib/iomgr/tcp_server_utils_posix_ifaddrs.cc"
+ , "src/core/lib/iomgr/tcp_server_utils_posix_noifaddrs.cc"
+ , "src/core/lib/iomgr/tcp_server_windows.cc"
+ , "src/core/lib/iomgr/tcp_uv.cc"
+ , "src/core/lib/iomgr/tcp_windows.cc"
+ , "src/core/lib/iomgr/time_averaged_stats.cc"
+ , "src/core/lib/iomgr/timer.cc"
+ , "src/core/lib/iomgr/timer_custom.cc"
+ , "src/core/lib/iomgr/timer_generic.cc"
+ , "src/core/lib/iomgr/timer_heap.cc"
+ , "src/core/lib/iomgr/timer_manager.cc"
+ , "src/core/lib/iomgr/timer_uv.cc"
+ , "src/core/lib/iomgr/udp_server.cc"
+ , "src/core/lib/iomgr/unix_sockets_posix.cc"
+ , "src/core/lib/iomgr/unix_sockets_posix_noop.cc"
+ , "src/core/lib/iomgr/wakeup_fd_eventfd.cc"
+ , "src/core/lib/iomgr/wakeup_fd_nospecial.cc"
+ , "src/core/lib/iomgr/wakeup_fd_pipe.cc"
+ , "src/core/lib/iomgr/wakeup_fd_posix.cc"
+ , "src/core/lib/iomgr/work_serializer.cc"
+ , "src/core/lib/json/json_reader.cc"
+ , "src/core/lib/json/json_writer.cc"
+ , "src/core/lib/slice/b64.cc"
+ , "src/core/lib/slice/percent_encoding.cc"
+ , "src/core/lib/slice/slice.cc"
+ , "src/core/lib/slice/slice_buffer.cc"
+ , "src/core/lib/slice/slice_intern.cc"
+ , "src/core/lib/slice/slice_string_helpers.cc"
+ , "src/core/lib/surface/api_trace.cc"
+ , "src/core/lib/surface/byte_buffer.cc"
+ , "src/core/lib/surface/byte_buffer_reader.cc"
+ , "src/core/lib/surface/call.cc"
+ , "src/core/lib/surface/call_details.cc"
+ , "src/core/lib/surface/call_log_batch.cc"
+ , "src/core/lib/surface/channel.cc"
+ , "src/core/lib/surface/channel_init.cc"
+ , "src/core/lib/surface/channel_ping.cc"
+ , "src/core/lib/surface/channel_stack_type.cc"
+ , "src/core/lib/surface/completion_queue.cc"
+ , "src/core/lib/surface/completion_queue_factory.cc"
+ , "src/core/lib/surface/event_string.cc"
+ , "src/core/lib/surface/metadata_array.cc"
+ , "src/core/lib/surface/server.cc"
+ , "src/core/lib/surface/validate_metadata.cc"
+ , "src/core/lib/surface/version.cc"
+ , "src/core/lib/transport/bdp_estimator.cc"
+ , "src/core/lib/transport/byte_stream.cc"
+ , "src/core/lib/transport/connectivity_state.cc"
+ , "src/core/lib/transport/error_utils.cc"
+ , "src/core/lib/transport/metadata.cc"
+ , "src/core/lib/transport/metadata_batch.cc"
+ , "src/core/lib/transport/pid_controller.cc"
+ , "src/core/lib/transport/static_metadata.cc"
+ , "src/core/lib/transport/status_conversion.cc"
+ , "src/core/lib/transport/status_metadata.cc"
+ , "src/core/lib/transport/timeout_encoding.cc"
+ , "src/core/lib/transport/transport.cc"
+ , "src/core/lib/transport/transport_op_string.cc"
+ , "src/core/lib/uri/uri_parser.cc"
+ ]
+ , "hdrs":
+ [ "src/core/lib/avl/avl.h"
+ , "src/core/lib/backoff/backoff.h"
+ , "src/core/lib/channel/channel_args.h"
+ , "src/core/lib/channel/channel_stack.h"
+ , "src/core/lib/channel/channel_stack_builder.h"
+ , "src/core/lib/channel/channel_trace.h"
+ , "src/core/lib/channel/channelz.h"
+ , "src/core/lib/channel/channelz_registry.h"
+ , "src/core/lib/channel/connected_channel.h"
+ , "src/core/lib/channel/context.h"
+ , "src/core/lib/channel/handshaker.h"
+ , "src/core/lib/channel/handshaker_factory.h"
+ , "src/core/lib/channel/handshaker_registry.h"
+ , "src/core/lib/channel/status_util.h"
+ , "src/core/lib/compression/algorithm_metadata.h"
+ , "src/core/lib/compression/compression_args.h"
+ , "src/core/lib/compression/compression_internal.h"
+ , "src/core/lib/compression/message_compress.h"
+ , "src/core/lib/compression/stream_compression.h"
+ , "src/core/lib/compression/stream_compression_gzip.h"
+ , "src/core/lib/compression/stream_compression_identity.h"
+ , "src/core/lib/debug/stats.h"
+ , "src/core/lib/debug/stats_data.h"
+ , "src/core/lib/http/format_request.h"
+ , "src/core/lib/http/httpcli.h"
+ , "src/core/lib/http/parser.h"
+ , "src/core/lib/iomgr/block_annotate.h"
+ , "src/core/lib/iomgr/buffer_list.h"
+ , "src/core/lib/iomgr/call_combiner.h"
+ , "src/core/lib/iomgr/cfstream_handle.h"
+ , "src/core/lib/iomgr/closure.h"
+ , "src/core/lib/iomgr/combiner.h"
+ , "src/core/lib/iomgr/dynamic_annotations.h"
+ , "src/core/lib/iomgr/endpoint.h"
+ , "src/core/lib/iomgr/endpoint_cfstream.h"
+ , "src/core/lib/iomgr/endpoint_pair.h"
+ , "src/core/lib/iomgr/error.h"
+ , "src/core/lib/iomgr/error_cfstream.h"
+ , "src/core/lib/iomgr/error_internal.h"
+ , "src/core/lib/iomgr/ev_apple.h"
+ , "src/core/lib/iomgr/ev_epoll1_linux.h"
+ , "src/core/lib/iomgr/ev_epollex_linux.h"
+ , "src/core/lib/iomgr/ev_poll_posix.h"
+ , "src/core/lib/iomgr/ev_posix.h"
+ , "src/core/lib/iomgr/exec_ctx.h"
+ , "src/core/lib/iomgr/executor.h"
+ , "src/core/lib/iomgr/executor/mpmcqueue.h"
+ , "src/core/lib/iomgr/executor/threadpool.h"
+ , "src/core/lib/iomgr/gethostname.h"
+ , "src/core/lib/iomgr/grpc_if_nametoindex.h"
+ , "src/core/lib/iomgr/internal_errqueue.h"
+ , "src/core/lib/iomgr/iocp_windows.h"
+ , "src/core/lib/iomgr/iomgr.h"
+ , "src/core/lib/iomgr/iomgr_custom.h"
+ , "src/core/lib/iomgr/iomgr_internal.h"
+ , "src/core/lib/iomgr/iomgr_posix.h"
+ , "src/core/lib/iomgr/is_epollexclusive_available.h"
+ , "src/core/lib/iomgr/load_file.h"
+ , "src/core/lib/iomgr/lockfree_event.h"
+ , "src/core/lib/iomgr/nameser.h"
+ , "src/core/lib/iomgr/polling_entity.h"
+ , "src/core/lib/iomgr/pollset.h"
+ , "src/core/lib/iomgr/pollset_custom.h"
+ , "src/core/lib/iomgr/pollset_set.h"
+ , "src/core/lib/iomgr/pollset_set_custom.h"
+ , "src/core/lib/iomgr/pollset_set_windows.h"
+ , "src/core/lib/iomgr/pollset_uv.h"
+ , "src/core/lib/iomgr/pollset_windows.h"
+ , "src/core/lib/iomgr/port.h"
+ , "src/core/lib/iomgr/python_util.h"
+ , "src/core/lib/iomgr/resolve_address.h"
+ , "src/core/lib/iomgr/resolve_address_custom.h"
+ , "src/core/lib/iomgr/resource_quota.h"
+ , "src/core/lib/iomgr/sockaddr.h"
+ , "src/core/lib/iomgr/sockaddr_custom.h"
+ , "src/core/lib/iomgr/sockaddr_posix.h"
+ , "src/core/lib/iomgr/sockaddr_utils.h"
+ , "src/core/lib/iomgr/sockaddr_windows.h"
+ , "src/core/lib/iomgr/socket_factory_posix.h"
+ , "src/core/lib/iomgr/socket_mutator.h"
+ , "src/core/lib/iomgr/socket_utils.h"
+ , "src/core/lib/iomgr/socket_utils_posix.h"
+ , "src/core/lib/iomgr/socket_windows.h"
+ , "src/core/lib/iomgr/sys_epoll_wrapper.h"
+ , "src/core/lib/iomgr/tcp_client.h"
+ , "src/core/lib/iomgr/tcp_client_posix.h"
+ , "src/core/lib/iomgr/tcp_custom.h"
+ , "src/core/lib/iomgr/tcp_posix.h"
+ , "src/core/lib/iomgr/tcp_server.h"
+ , "src/core/lib/iomgr/tcp_server_utils_posix.h"
+ , "src/core/lib/iomgr/tcp_windows.h"
+ , "src/core/lib/iomgr/time_averaged_stats.h"
+ , "src/core/lib/iomgr/timer.h"
+ , "src/core/lib/iomgr/timer_custom.h"
+ , "src/core/lib/iomgr/timer_generic.h"
+ , "src/core/lib/iomgr/timer_heap.h"
+ , "src/core/lib/iomgr/timer_manager.h"
+ , "src/core/lib/iomgr/udp_server.h"
+ , "src/core/lib/iomgr/unix_sockets_posix.h"
+ , "src/core/lib/iomgr/wakeup_fd_pipe.h"
+ , "src/core/lib/iomgr/wakeup_fd_posix.h"
+ , "src/core/lib/iomgr/work_serializer.h"
+ , "src/core/lib/json/json.h"
+ , "src/core/lib/slice/b64.h"
+ , "src/core/lib/slice/percent_encoding.h"
+ , "src/core/lib/slice/slice_hash_table.h"
+ , "src/core/lib/slice/slice_internal.h"
+ , "src/core/lib/slice/slice_string_helpers.h"
+ , "src/core/lib/slice/slice_utils.h"
+ , "src/core/lib/slice/slice_weak_hash_table.h"
+ , "src/core/lib/surface/api_trace.h"
+ , "src/core/lib/surface/call.h"
+ , "src/core/lib/surface/call_test_only.h"
+ , "src/core/lib/surface/channel.h"
+ , "src/core/lib/surface/channel_init.h"
+ , "src/core/lib/surface/channel_stack_type.h"
+ , "src/core/lib/surface/completion_queue.h"
+ , "src/core/lib/surface/completion_queue_factory.h"
+ , "src/core/lib/surface/event_string.h"
+ , "src/core/lib/surface/init.h"
+ , "src/core/lib/surface/lame_client.h"
+ , "src/core/lib/surface/server.h"
+ , "src/core/lib/surface/validate_metadata.h"
+ , "src/core/lib/transport/bdp_estimator.h"
+ , "src/core/lib/transport/byte_stream.h"
+ , "src/core/lib/transport/connectivity_state.h"
+ , "src/core/lib/transport/error_utils.h"
+ , "src/core/lib/transport/http2_errors.h"
+ , "src/core/lib/transport/metadata.h"
+ , "src/core/lib/transport/metadata_batch.h"
+ , "src/core/lib/transport/pid_controller.h"
+ , "src/core/lib/transport/static_metadata.h"
+ , "src/core/lib/transport/status_conversion.h"
+ , "src/core/lib/transport/status_metadata.h"
+ , "src/core/lib/transport/timeout_encoding.h"
+ , "src/core/lib/transport/transport.h"
+ , "src/core/lib/transport/transport_impl.h"
+ , "src/core/lib/uri/uri_parser.h"
+ ]
+ , "deps":
+ [ "eventmanager_libuv"
+ , "gpr_base"
+ , ["./", "include/grpc", "grpc_codegen"]
+ , "grpc_trace"
+ , "orphanable"
+ , "ref_counted"
+ , "ref_counted_ptr"
+ , ["@", "zlib", ".", "zlib"]
+ , ["@", "absl", "absl/container", "inlined_vector"]
+ , ["@", "absl", "absl/types", "optional"]
+ ]
+ }
+, "grpc_base":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_base"]
+ , "srcs": ["src/core/lib/surface/lame_client.cc"]
+ , "deps": ["atomic", "grpc_base_c"]
+ }
+, "census":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["census"]
+ , "srcs": ["src/core/ext/filters/census/grpc_context.cc"]
+ , "hdrs": [["./", "include/grpc", "census_headers"]]
+ , "deps": ["grpc_base"]
+ }
+, "grpc_deadline_filter":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_deadline_filter"]
+ , "srcs": ["src/core/ext/filters/deadline/deadline_filter.cc"]
+ , "hdrs": ["src/core/ext/filters/deadline/deadline_filter.h"]
+ , "deps": ["grpc_base"]
+ }
+, "grpc_client_authority_filter":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_client_authority_filter"]
+ , "srcs": ["src/core/ext/filters/http/client_authority_filter.cc"]
+ , "private-hdrs": ["src/core/ext/filters/http/client_authority_filter.h"]
+ , "deps": ["grpc_base"]
+ }
+, "proto_gen_validate_upb":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["proto_gen_validate_upb"]
+ , "pure C": ["YES"]
+ , "srcs":
+ [ "src/core/ext/upb-generated/gogoproto/gogo.upb.c"
+ , "src/core/ext/upb-generated/validate/validate.upb.c"
+ ]
+ , "hdrs":
+ [["./", "src/core/ext/upb-generated", "proto_gen_validate_upb_headers"]]
+ , "deps": ["google_api_upb", ["@", "upb", ".", "upb"]]
+ }
+, "udpa_orca_upb":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["udpa_orca_upb"]
+ , "pure C": ["YES"]
+ , "srcs":
+ ["src/core/ext/upb-generated/udpa/data/orca/v1/orca_load_report.upb.c"]
+ , "hdrs":
+ [["./", "src/core/ext/upb-generated", "udpa_orca_upb_headers"]]
+ , "deps": ["proto_gen_validate_upb", ["@", "upb", ".", "upb"]]
+ }
+, "grpc_health_upb":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_health_upb"]
+ , "pure C": ["YES"]
+ , "srcs":
+ ["src/core/ext/upb-generated/src/proto/grpc/health/v1/health.upb.c"]
+ , "hdrs":
+ [["./", "src/core/ext/upb-generated", "grpc_health_upb_headers"]]
+ , "deps": [["@", "upb", ".", "upb"]]
+ }
+, "google_api_upb":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["google_api_upb"]
+ , "pure C": ["yes"]
+ , "srcs":
+ [ "src/core/ext/upb-generated/google/api/annotations.upb.c"
+ , "src/core/ext/upb-generated/google/api/http.upb.c"
+ , "src/core/ext/upb-generated/google/protobuf/any.upb.c"
+ , "src/core/ext/upb-generated/google/protobuf/descriptor.upb.c"
+ , "src/core/ext/upb-generated/google/protobuf/duration.upb.c"
+ , "src/core/ext/upb-generated/google/protobuf/empty.upb.c"
+ , "src/core/ext/upb-generated/google/protobuf/struct.upb.c"
+ , "src/core/ext/upb-generated/google/protobuf/timestamp.upb.c"
+ , "src/core/ext/upb-generated/google/protobuf/wrappers.upb.c"
+ , "src/core/ext/upb-generated/google/rpc/status.upb.c"
+ ]
+ , "hdrs": [["./", "src/core/ext/upb-generated", "google_api_upb_headers"]]
+ , "deps": [["@", "upb", ".", "upb"]]
+ }
+, "grpc_lb_upb":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_lb_upb"]
+ , "srcs":
+ ["src/core/ext/upb-generated/src/proto/grpc/lb/v1/load_balancer.upb.c"]
+ , "hdrs":
+ [["./", "src/core/ext/upb-generated", "grpc_lb_upb_headers"]]
+ , "deps": ["google_api_upb", ["@", "upb", ".", "upb"]]
+ }
+, "grpc_client_channel":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_client_channel"]
+ , "srcs":
+ [ "src/core/ext/filters/client_channel/backend_metric.cc"
+ , "src/core/ext/filters/client_channel/backup_poller.cc"
+ , "src/core/ext/filters/client_channel/channel_connectivity.cc"
+ , "src/core/ext/filters/client_channel/client_channel.cc"
+ , "src/core/ext/filters/client_channel/client_channel_channelz.cc"
+ , "src/core/ext/filters/client_channel/client_channel_factory.cc"
+ , "src/core/ext/filters/client_channel/client_channel_plugin.cc"
+ , "src/core/ext/filters/client_channel/config_selector.cc"
+ , "src/core/ext/filters/client_channel/global_subchannel_pool.cc"
+ , "src/core/ext/filters/client_channel/health/health_check_client.cc"
+ , "src/core/ext/filters/client_channel/http_connect_handshaker.cc"
+ , "src/core/ext/filters/client_channel/http_proxy.cc"
+ , "src/core/ext/filters/client_channel/lb_policy.cc"
+ , "src/core/ext/filters/client_channel/lb_policy/child_policy_handler.cc"
+ , "src/core/ext/filters/client_channel/lb_policy_registry.cc"
+ , "src/core/ext/filters/client_channel/local_subchannel_pool.cc"
+ , "src/core/ext/filters/client_channel/parse_address.cc"
+ , "src/core/ext/filters/client_channel/proxy_mapper_registry.cc"
+ , "src/core/ext/filters/client_channel/resolver.cc"
+ , "src/core/ext/filters/client_channel/resolver_registry.cc"
+ , "src/core/ext/filters/client_channel/resolver_result_parsing.cc"
+ , "src/core/ext/filters/client_channel/resolving_lb_policy.cc"
+ , "src/core/ext/filters/client_channel/retry_throttle.cc"
+ , "src/core/ext/filters/client_channel/server_address.cc"
+ , "src/core/ext/filters/client_channel/service_config.cc"
+ , "src/core/ext/filters/client_channel/service_config_channel_arg_filter.cc"
+ , "src/core/ext/filters/client_channel/service_config_parser.cc"
+ , "src/core/ext/filters/client_channel/subchannel.cc"
+ , "src/core/ext/filters/client_channel/subchannel_pool_interface.cc"
+ ]
+ , "hdrs":
+ [ "src/core/ext/filters/client_channel/backend_metric.h"
+ , "src/core/ext/filters/client_channel/backup_poller.h"
+ , "src/core/ext/filters/client_channel/client_channel.h"
+ , "src/core/ext/filters/client_channel/client_channel_channelz.h"
+ , "src/core/ext/filters/client_channel/client_channel_factory.h"
+ , "src/core/ext/filters/client_channel/config_selector.h"
+ , "src/core/ext/filters/client_channel/connector.h"
+ , "src/core/ext/filters/client_channel/global_subchannel_pool.h"
+ , "src/core/ext/filters/client_channel/health/health_check_client.h"
+ , "src/core/ext/filters/client_channel/http_connect_handshaker.h"
+ , "src/core/ext/filters/client_channel/http_proxy.h"
+ , "src/core/ext/filters/client_channel/lb_policy.h"
+ , "src/core/ext/filters/client_channel/lb_policy/child_policy_handler.h"
+ , "src/core/ext/filters/client_channel/lb_policy_factory.h"
+ , "src/core/ext/filters/client_channel/lb_policy_registry.h"
+ , "src/core/ext/filters/client_channel/local_subchannel_pool.h"
+ , "src/core/ext/filters/client_channel/parse_address.h"
+ , "src/core/ext/filters/client_channel/proxy_mapper.h"
+ , "src/core/ext/filters/client_channel/proxy_mapper_registry.h"
+ , "src/core/ext/filters/client_channel/resolver.h"
+ , "src/core/ext/filters/client_channel/resolver_factory.h"
+ , "src/core/ext/filters/client_channel/resolver_registry.h"
+ , "src/core/ext/filters/client_channel/resolver_result_parsing.h"
+ , "src/core/ext/filters/client_channel/resolving_lb_policy.h"
+ , "src/core/ext/filters/client_channel/retry_throttle.h"
+ , "src/core/ext/filters/client_channel/server_address.h"
+ , "src/core/ext/filters/client_channel/service_config.h"
+ , "src/core/ext/filters/client_channel/service_config_call_data.h"
+ , "src/core/ext/filters/client_channel/service_config_parser.h"
+ , "src/core/ext/filters/client_channel/subchannel.h"
+ , "src/core/ext/filters/client_channel/subchannel_interface.h"
+ , "src/core/ext/filters/client_channel/subchannel_pool_interface.h"
+ ]
+ , "deps":
+ [ "gpr_base"
+ , "grpc_base"
+ , "grpc_client_authority_filter"
+ , "grpc_deadline_filter"
+ , "grpc_health_upb"
+ , "orphanable"
+ , "ref_counted"
+ , "ref_counted_ptr"
+ , "udpa_orca_upb"
+ , ["@", "absl", "absl/container", "inlined_vector"]
+ ]
+ }
+, "grpc_client_idle_filter":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_client_idle_filter"]
+ , "srcs": ["src/core/ext/filters/client_idle/client_idle_filter.cc"]
+ , "deps": ["grpc_base"]
+ }
+, "grpc_max_age_filter":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_max_age_filter"]
+ , "srcs": ["src/core/ext/filters/max_age/max_age_filter.cc"]
+ , "private-hdrs": ["src/core/ext/filters/max_age/max_age_filter.h"]
+ , "deps": ["grpc_base"]
+ }
+, "grpc_message_size_filter":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_message_size_filter"]
+ , "srcs": ["src/core/ext/filters/message_size/message_size_filter.cc"]
+ , "hdrs": ["src/core/ext/filters/message_size/message_size_filter.h"]
+ , "deps": ["grpc_base", "grpc_client_channel"]
+ }
+, "grpc_http_filters":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_http_filters"]
+ , "srcs":
+ [ "src/core/ext/filters/http/client/http_client_filter.cc"
+ , "src/core/ext/filters/http/http_filters_plugin.cc"
+ , "src/core/ext/filters/http/message_compress/message_compress_filter.cc"
+ , "src/core/ext/filters/http/message_compress/message_decompress_filter.cc"
+ , "src/core/ext/filters/http/server/http_server_filter.cc"
+ ]
+ , "hdrs":
+ [ "src/core/ext/filters/http/client/http_client_filter.h"
+ , "src/core/ext/filters/http/message_compress/message_compress_filter.h"
+ , "src/core/ext/filters/http/message_compress/message_decompress_filter.h"
+ , "src/core/ext/filters/http/server/http_server_filter.h"
+ ]
+ , "deps": ["grpc_base", "grpc_message_size_filter"]
+ }
+, "grpc_server_backward_compatibility":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_server_backward_compatibility"]
+ , "srcs": ["src/core/ext/filters/workarounds/workaround_utils.cc"]
+ , "hdrs": ["src/core/ext/filters/workarounds/workaround_utils.h"]
+ , "deps": ["grpc_base"]
+ }
+, "grpc_workaround_cronet_compression_filter":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_workaround_cronet_compression_filter"]
+ , "srcs":
+ [ "src/core/ext/filters/workarounds/workaround_cronet_compression_filter.cc"
+ ]
+ , "private-hdrs":
+ [ "src/core/ext/filters/workarounds/workaround_cronet_compression_filter.h"
+ ]
+ , "deps": ["grpc_base", "grpc_server_backward_compatibility"]
+ }
+, "grpc_grpclb_balancer_addresses":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_grpclb_balancer_addresses"]
+ , "srcs":
+ [ "src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_balancer_addresses.cc"
+ ]
+ , "hdrs":
+ [ "src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_balancer_addresses.h"
+ ]
+ , "deps": ["grpc_base", "grpc_client_channel"]
+ }
+, "grpc_lb_subchannel_list":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_lb_subchannel_list"]
+ , "hdrs": ["src/core/ext/filters/client_channel/lb_policy/subchannel_list.h"]
+ , "deps": ["grpc_base", "grpc_client_channel"]
+ }
+, "grpc_lb_policy_pick_first":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_lb_policy_pick_first"]
+ , "srcs":
+ [ "src/core/ext/filters/client_channel/lb_policy/pick_first/pick_first.cc"
+ ]
+ , "deps": ["grpc_base", "grpc_client_channel", "grpc_lb_subchannel_list"]
+ }
+, "grpc_lb_policy_round_robin":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_lb_policy_round_robin"]
+ , "srcs":
+ [ "src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.cc"
+ ]
+ , "deps": ["grpc_base", "grpc_client_channel", "grpc_lb_subchannel_list"]
+ }
+, "grpc_lb_policy_priority":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_lb_policy_priority"]
+ , "srcs":
+ ["src/core/ext/filters/client_channel/lb_policy/priority/priority.cc"]
+ , "deps":
+ [ "grpc_base"
+ , "grpc_client_channel"
+ , "grpc_lb_address_filtering"
+ , ["@", "absl", "absl/strings", "strings"]
+ ]
+ }
+, "grpc_lb_policy_weighted_target":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_lb_policy_weighted_target"]
+ , "srcs":
+ [ "src/core/ext/filters/client_channel/lb_policy/weighted_target/weighted_target.cc"
+ ]
+ , "deps": ["grpc_base", "grpc_client_channel", "grpc_lb_address_filtering"]
+ }
+, "grpc_resolver_dns_selection":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_resolver_dns_selection"]
+ , "srcs":
+ [ "src/core/ext/filters/client_channel/resolver/dns/dns_resolver_selection.cc"
+ ]
+ , "hdrs":
+ [ "src/core/ext/filters/client_channel/resolver/dns/dns_resolver_selection.h"
+ ]
+ , "deps": ["grpc_base"]
+ }
+, "grpc_resolver_dns_native":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_resolver_dns_native"]
+ , "srcs":
+ [ "src/core/ext/filters/client_channel/resolver/dns/native/dns_resolver.cc"
+ ]
+ , "deps": ["grpc_base", "grpc_client_channel", "grpc_resolver_dns_selection"]
+ }
+, "grpc_resolver_dns_ares":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_resolver_dns_ares"]
+ , "srcs":
+ [ "src/core/ext/filters/client_channel/resolver/dns/c_ares/dns_resolver_ares.cc"
+ , "src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver.cc"
+ , "src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_libuv.cc"
+ , "src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_posix.cc"
+ , "src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_windows.cc"
+ , "src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.cc"
+ , "src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_fallback.cc"
+ , "src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_libuv.cc"
+ , "src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_posix.cc"
+ , "src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_windows.cc"
+ ]
+ , "private-hdrs":
+ [ "src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver.h"
+ , "src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.h"
+ ]
+ , "deps":
+ [ "grpc_base"
+ , "grpc_client_channel"
+ , "grpc_grpclb_balancer_addresses"
+ , "grpc_resolver_dns_selection"
+ , ["@", "cares", ".", "ares"]
+ , ["third_party", "address_sorting"]
+ ]
+ }
+, "grpc_resolver_sockaddr":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_resolver_sockaddr"]
+ , "srcs":
+ [ "src/core/ext/filters/client_channel/resolver/sockaddr/sockaddr_resolver.cc"
+ ]
+ , "deps": ["grpc_base", "grpc_client_channel"]
+ }
+, "grpc_resolver_fake":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_resolver_fake"]
+ , "srcs":
+ ["src/core/ext/filters/client_channel/resolver/fake/fake_resolver.cc"]
+ , "hdrs":
+ ["src/core/ext/filters/client_channel/resolver/fake/fake_resolver.h"]
+ , "deps": ["grpc_base", "grpc_client_channel"]
+ }
+, "tsi_interface":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["tsi_interface"]
+ , "srcs": ["src/core/tsi/transport_security.cc"]
+ , "hdrs":
+ [ "src/core/tsi/transport_security.h"
+ , "src/core/tsi/transport_security_interface.h"
+ ]
+ , "deps": ["gpr", "grpc_trace"]
+ }
+, "alts_frame_protector":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["alts_frame_protector"]
+ , "srcs":
+ [ "src/core/tsi/alts/crypt/aes_gcm.cc"
+ , "src/core/tsi/alts/crypt/gsec.cc"
+ , "src/core/tsi/alts/frame_protector/alts_counter.cc"
+ , "src/core/tsi/alts/frame_protector/alts_crypter.cc"
+ , "src/core/tsi/alts/frame_protector/alts_frame_protector.cc"
+ , "src/core/tsi/alts/frame_protector/alts_record_protocol_crypter_common.cc"
+ , "src/core/tsi/alts/frame_protector/alts_seal_privacy_integrity_crypter.cc"
+ , "src/core/tsi/alts/frame_protector/alts_unseal_privacy_integrity_crypter.cc"
+ , "src/core/tsi/alts/frame_protector/frame_handler.cc"
+ , "src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_integrity_only_record_protocol.cc"
+ , "src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_privacy_integrity_record_protocol.cc"
+ , "src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_record_protocol_common.cc"
+ , "src/core/tsi/alts/zero_copy_frame_protector/alts_iovec_record_protocol.cc"
+ , "src/core/tsi/alts/zero_copy_frame_protector/alts_zero_copy_grpc_protector.cc"
+ ]
+ , "hdrs":
+ [ "src/core/tsi/alts/crypt/gsec.h"
+ , "src/core/tsi/alts/frame_protector/alts_counter.h"
+ , "src/core/tsi/alts/frame_protector/alts_crypter.h"
+ , "src/core/tsi/alts/frame_protector/alts_frame_protector.h"
+ , "src/core/tsi/alts/frame_protector/alts_record_protocol_crypter_common.h"
+ , "src/core/tsi/alts/frame_protector/frame_handler.h"
+ , "src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_integrity_only_record_protocol.h"
+ , "src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_privacy_integrity_record_protocol.h"
+ , "src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_record_protocol.h"
+ , "src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_record_protocol_common.h"
+ , "src/core/tsi/alts/zero_copy_frame_protector/alts_iovec_record_protocol.h"
+ , "src/core/tsi/alts/zero_copy_frame_protector/alts_zero_copy_grpc_protector.h"
+ , "src/core/tsi/transport_security_grpc.h"
+ ]
+ , "deps":
+ [ "gpr"
+ , "grpc_base"
+ , "tsi_interface"
+ , ["@", "libssl", ".", "ssl"]
+ ]
+ }
+, "alts_util":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["alts_util"]
+ , "srcs":
+ [ "src/core/lib/security/credentials/alts/check_gcp_environment.cc"
+ , "src/core/lib/security/credentials/alts/check_gcp_environment_linux.cc"
+ , "src/core/lib/security/credentials/alts/check_gcp_environment_no_op.cc"
+ , "src/core/lib/security/credentials/alts/check_gcp_environment_windows.cc"
+ , "src/core/lib/security/credentials/alts/grpc_alts_credentials_client_options.cc"
+ , "src/core/lib/security/credentials/alts/grpc_alts_credentials_options.cc"
+ , "src/core/lib/security/credentials/alts/grpc_alts_credentials_server_options.cc"
+ , "src/core/tsi/alts/handshaker/transport_security_common_api.cc"
+ ]
+ , "hdrs":
+ [ "src/core/lib/security/credentials/alts/check_gcp_environment.h"
+ , "src/core/lib/security/credentials/alts/grpc_alts_credentials_options.h"
+ , "src/core/tsi/alts/handshaker/transport_security_common_api.h"
+ , ["./", "include/grpc", "grpc_secure_public_headers"]
+ ]
+ , "deps":
+ [ ["./", "src/core/ext/upb-generated", "alts_upb"]
+ , "gpr"
+ , "grpc_base"
+ ]
+ }
+, "tsi":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["tsi"]
+ , "srcs":
+ [ "src/core/tsi/alts/handshaker/alts_handshaker_client.cc"
+ , "src/core/tsi/alts/handshaker/alts_shared_resource.cc"
+ , "src/core/tsi/alts/handshaker/alts_tsi_handshaker.cc"
+ , "src/core/tsi/alts/handshaker/alts_tsi_utils.cc"
+ , "src/core/tsi/fake_transport_security.cc"
+ , "src/core/tsi/local_transport_security.cc"
+ , "src/core/tsi/ssl/session_cache/ssl_session_boringssl.cc"
+ , "src/core/tsi/ssl/session_cache/ssl_session_cache.cc"
+ , "src/core/tsi/ssl/session_cache/ssl_session_openssl.cc"
+ , "src/core/tsi/ssl_transport_security.cc"
+ , "src/core/tsi/transport_security_grpc.cc"
+ ]
+ , "hdrs":
+ [ "src/core/tsi/alts/handshaker/alts_handshaker_client.h"
+ , "src/core/tsi/alts/handshaker/alts_shared_resource.h"
+ , "src/core/tsi/alts/handshaker/alts_tsi_handshaker.h"
+ , "src/core/tsi/alts/handshaker/alts_tsi_handshaker_private.h"
+ , "src/core/tsi/alts/handshaker/alts_tsi_utils.h"
+ , "src/core/tsi/fake_transport_security.h"
+ , "src/core/tsi/local_transport_security.h"
+ , "src/core/tsi/ssl/session_cache/ssl_session.h"
+ , "src/core/tsi/ssl/session_cache/ssl_session_cache.h"
+ , "src/core/tsi/ssl_transport_security.h"
+ , "src/core/tsi/ssl_types.h"
+ , "src/core/tsi/transport_security_grpc.h"
+ ]
+ , "deps":
+ [ "alts_frame_protector"
+ , "alts_util"
+ , "gpr"
+ , "grpc_base"
+ , "grpc_transport_chttp2_client_insecure"
+ , "tsi_interface"
+ , ["@", "libssl", ".", "ssl"]
+ ]
+ }
+, "grpc_transport_chttp2_alpn":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_transport_chttp2_alpn"]
+ , "srcs": ["src/core/ext/transport/chttp2/alpn/alpn.cc"]
+ , "hdrs": ["src/core/ext/transport/chttp2/alpn/alpn.h"]
+ , "deps": ["gpr"]
+ }
+, "grpc_lb_policy_grpclb_secure":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_lb_policy_grpclb_secure"]
+ , "srcs":
+ [ "src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.cc"
+ , "src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.cc"
+ , "src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_channel_secure.cc"
+ , "src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.cc"
+ , "src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.cc"
+ ]
+ , "private-hdrs":
+ [ "src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.h"
+ , "src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.h"
+ , "src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_channel.h"
+ , "src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.h"
+ , "src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.h"
+ ]
+ , "deps":
+ [ "grpc_base"
+ , "grpc_client_channel"
+ , "grpc_grpclb_balancer_addresses"
+ , "grpc_lb_upb"
+ , "grpc_resolver_fake"
+ , "grpc_secure"
+ , "grpc_transport_chttp2_client_secure"
+ ]
+ }
+, "grpc_xds_api_header":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_xds_api_header"]
+ , "hdrs":
+ [ "src/core/ext/filters/client_channel/xds/xds_api.h"
+ , "src/core/ext/filters/client_channel/xds/xds_bootstrap.h"
+ , "src/core/ext/filters/client_channel/xds/xds_client_stats.h"
+ ]
+ , "deps":
+ [ ["@", "upb", ".", "upb"]
+ , ["@", "re2", ".", "re2"]
+ , "grpc_base"
+ ]
+ }
+, "grpc_xds_client_secure":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_xds_client_secure"]
+ , "srcs":
+ [ "src/core/ext/filters/client_channel/xds/xds_api.cc"
+ , "src/core/ext/filters/client_channel/xds/xds_bootstrap.cc"
+ , "src/core/ext/filters/client_channel/xds/xds_channel_secure.cc"
+ , "src/core/ext/filters/client_channel/xds/xds_client.cc"
+ , "src/core/ext/filters/client_channel/xds/xds_client_stats.cc"
+ ]
+ , "hdrs":
+ [ "src/core/ext/filters/client_channel/xds/xds_channel.h"
+ , "src/core/ext/filters/client_channel/xds/xds_channel_args.h"
+ , "src/core/ext/filters/client_channel/xds/xds_client.h"
+ ]
+ , "deps":
+ [ "envoy_ads_upb"
+ , "grpc_base"
+ , "grpc_client_channel"
+ , "grpc_secure"
+ , "grpc_xds_api_header"
+ ]
+ }
+, "grpc_lb_address_filtering":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_lb_address_filtering"]
+ , "srcs":
+ ["src/core/ext/filters/client_channel/lb_policy/address_filtering.cc"]
+ , "hdrs":
+ ["src/core/ext/filters/client_channel/lb_policy/address_filtering.h"]
+ , "deps":
+ [ "grpc_base"
+ , "grpc_client_channel"
+ , ["@", "absl", "absl/strings", "strings"]
+ ]
+ }
+, "grpc_lb_policy_cds_secure":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_lb_policy_cds_secure"]
+ , "srcs": ["src/core/ext/filters/client_channel/lb_policy/xds/cds.cc"]
+ , "deps": ["grpc_base", "grpc_client_channel", "grpc_xds_client_secure"]
+ }
+, "grpc_lb_policy_eds_secure":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_lb_policy_eds_secure"]
+ , "srcs": ["src/core/ext/filters/client_channel/lb_policy/xds/eds.cc"]
+ , "private-hdrs": ["src/core/ext/filters/client_channel/lb_policy/xds/xds.h"]
+ , "deps":
+ [ "grpc_base"
+ , "grpc_client_channel"
+ , "grpc_lb_address_filtering"
+ , "grpc_xds_client_secure"
+ ]
+ }
+, "grpc_lb_policy_lrs_secure":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_lb_policy_lrs_secure"]
+ , "srcs": ["src/core/ext/filters/client_channel/lb_policy/xds/lrs.cc"]
+ , "deps": ["grpc_base", "grpc_client_channel", "grpc_xds_client_secure"]
+ }
+, "grpc_lb_policy_xds_routing":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_lb_policy_xds_routing"]
+ , "srcs": ["src/core/ext/filters/client_channel/lb_policy/xds/xds_routing.cc"]
+ , "deps":
+ [ "grpc_base"
+ , "grpc_client_channel"
+ , "grpc_xds_api_header"
+ , ["@", "absl", "absl/strings", "strings"]
+ ]
+ }
+, "grpc_resolver_xds_secure":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_resolver_xds_secure"]
+ , "srcs": ["src/core/ext/filters/client_channel/resolver/xds/xds_resolver.cc"]
+ , "private-hdrs": ["src/core/ext/transport/chttp2/alpn/alpn.h"]
+ , "deps": ["grpc_base", "grpc_client_channel", "grpc_xds_client_secure"]
+ }
+, "grpc_secure":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_secure"]
+ , "srcs":
+ [ "src/core/lib/http/httpcli_security_connector.cc"
+ , "src/core/lib/security/context/security_context.cc"
+ , "src/core/lib/security/credentials/alts/alts_credentials.cc"
+ , "src/core/lib/security/credentials/composite/composite_credentials.cc"
+ , "src/core/lib/security/credentials/credentials.cc"
+ , "src/core/lib/security/credentials/credentials_metadata.cc"
+ , "src/core/lib/security/credentials/fake/fake_credentials.cc"
+ , "src/core/lib/security/credentials/google_default/credentials_generic.cc"
+ , "src/core/lib/security/credentials/google_default/google_default_credentials.cc"
+ , "src/core/lib/security/credentials/iam/iam_credentials.cc"
+ , "src/core/lib/security/credentials/jwt/json_token.cc"
+ , "src/core/lib/security/credentials/jwt/jwt_credentials.cc"
+ , "src/core/lib/security/credentials/jwt/jwt_verifier.cc"
+ , "src/core/lib/security/credentials/local/local_credentials.cc"
+ , "src/core/lib/security/credentials/oauth2/oauth2_credentials.cc"
+ , "src/core/lib/security/credentials/plugin/plugin_credentials.cc"
+ , "src/core/lib/security/credentials/ssl/ssl_credentials.cc"
+ , "src/core/lib/security/credentials/tls/grpc_tls_credentials_options.cc"
+ , "src/core/lib/security/credentials/tls/tls_credentials.cc"
+ , "src/core/lib/security/security_connector/alts/alts_security_connector.cc"
+ , "src/core/lib/security/security_connector/fake/fake_security_connector.cc"
+ , "src/core/lib/security/security_connector/load_system_roots_fallback.cc"
+ , "src/core/lib/security/security_connector/load_system_roots_linux.cc"
+ , "src/core/lib/security/security_connector/local/local_security_connector.cc"
+ , "src/core/lib/security/security_connector/security_connector.cc"
+ , "src/core/lib/security/security_connector/ssl/ssl_security_connector.cc"
+ , "src/core/lib/security/security_connector/ssl_utils.cc"
+ , "src/core/lib/security/security_connector/ssl_utils_config.cc"
+ , "src/core/lib/security/security_connector/tls/tls_security_connector.cc"
+ , "src/core/lib/security/transport/client_auth_filter.cc"
+ , "src/core/lib/security/transport/secure_endpoint.cc"
+ , "src/core/lib/security/transport/security_handshaker.cc"
+ , "src/core/lib/security/transport/server_auth_filter.cc"
+ , "src/core/lib/security/transport/target_authority_table.cc"
+ , "src/core/lib/security/transport/tsi_error.cc"
+ , "src/core/lib/security/util/json_util.cc"
+ , "src/core/lib/surface/init_secure.cc"
+ ]
+ , "hdrs":
+ [ "src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.h"
+ , "src/core/ext/filters/client_channel/xds/xds_channel_args.h"
+ , "src/core/lib/security/context/security_context.h"
+ , "src/core/lib/security/credentials/alts/alts_credentials.h"
+ , "src/core/lib/security/credentials/composite/composite_credentials.h"
+ , "src/core/lib/security/credentials/credentials.h"
+ , "src/core/lib/security/credentials/fake/fake_credentials.h"
+ , "src/core/lib/security/credentials/google_default/google_default_credentials.h"
+ , "src/core/lib/security/credentials/iam/iam_credentials.h"
+ , "src/core/lib/security/credentials/jwt/json_token.h"
+ , "src/core/lib/security/credentials/jwt/jwt_credentials.h"
+ , "src/core/lib/security/credentials/jwt/jwt_verifier.h"
+ , "src/core/lib/security/credentials/local/local_credentials.h"
+ , "src/core/lib/security/credentials/oauth2/oauth2_credentials.h"
+ , "src/core/lib/security/credentials/plugin/plugin_credentials.h"
+ , "src/core/lib/security/credentials/ssl/ssl_credentials.h"
+ , "src/core/lib/security/credentials/tls/grpc_tls_credentials_options.h"
+ , "src/core/lib/security/credentials/tls/tls_credentials.h"
+ , "src/core/lib/security/security_connector/alts/alts_security_connector.h"
+ , "src/core/lib/security/security_connector/fake/fake_security_connector.h"
+ , "src/core/lib/security/security_connector/load_system_roots.h"
+ , "src/core/lib/security/security_connector/load_system_roots_linux.h"
+ , "src/core/lib/security/security_connector/local/local_security_connector.h"
+ , "src/core/lib/security/security_connector/security_connector.h"
+ , "src/core/lib/security/security_connector/ssl/ssl_security_connector.h"
+ , "src/core/lib/security/security_connector/ssl_utils.h"
+ , "src/core/lib/security/security_connector/ssl_utils_config.h"
+ , "src/core/lib/security/security_connector/tls/tls_security_connector.h"
+ , "src/core/lib/security/transport/auth_filters.h"
+ , "src/core/lib/security/transport/secure_endpoint.h"
+ , "src/core/lib/security/transport/security_handshaker.h"
+ , "src/core/lib/security/transport/target_authority_table.h"
+ , "src/core/lib/security/transport/tsi_error.h"
+ , "src/core/lib/security/util/json_util.h"
+ , ["./", "include/grpc", "grpc_secure_public_headers"]
+ ]
+ , "deps":
+ [ "alts_util"
+ , "grpc_base"
+ , "grpc_transport_chttp2_alpn"
+ , "tsi"
+ ]
+ }
+, "grpc_transport_chttp2":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_transport_chttp2"]
+ , "srcs":
+ [ "src/core/ext/transport/chttp2/transport/bin_decoder.cc"
+ , "src/core/ext/transport/chttp2/transport/bin_encoder.cc"
+ , "src/core/ext/transport/chttp2/transport/chttp2_plugin.cc"
+ , "src/core/ext/transport/chttp2/transport/chttp2_transport.cc"
+ , "src/core/ext/transport/chttp2/transport/context_list.cc"
+ , "src/core/ext/transport/chttp2/transport/flow_control.cc"
+ , "src/core/ext/transport/chttp2/transport/frame_data.cc"
+ , "src/core/ext/transport/chttp2/transport/frame_goaway.cc"
+ , "src/core/ext/transport/chttp2/transport/frame_ping.cc"
+ , "src/core/ext/transport/chttp2/transport/frame_rst_stream.cc"
+ , "src/core/ext/transport/chttp2/transport/frame_settings.cc"
+ , "src/core/ext/transport/chttp2/transport/frame_window_update.cc"
+ , "src/core/ext/transport/chttp2/transport/hpack_encoder.cc"
+ , "src/core/ext/transport/chttp2/transport/hpack_parser.cc"
+ , "src/core/ext/transport/chttp2/transport/hpack_table.cc"
+ , "src/core/ext/transport/chttp2/transport/http2_settings.cc"
+ , "src/core/ext/transport/chttp2/transport/huffsyms.cc"
+ , "src/core/ext/transport/chttp2/transport/incoming_metadata.cc"
+ , "src/core/ext/transport/chttp2/transport/parsing.cc"
+ , "src/core/ext/transport/chttp2/transport/stream_lists.cc"
+ , "src/core/ext/transport/chttp2/transport/stream_map.cc"
+ , "src/core/ext/transport/chttp2/transport/varint.cc"
+ , "src/core/ext/transport/chttp2/transport/writing.cc"
+ ]
+ , "hdrs":
+ [ "src/core/ext/transport/chttp2/transport/bin_decoder.h"
+ , "src/core/ext/transport/chttp2/transport/bin_encoder.h"
+ , "src/core/ext/transport/chttp2/transport/chttp2_transport.h"
+ , "src/core/ext/transport/chttp2/transport/context_list.h"
+ , "src/core/ext/transport/chttp2/transport/flow_control.h"
+ , "src/core/ext/transport/chttp2/transport/frame.h"
+ , "src/core/ext/transport/chttp2/transport/frame_data.h"
+ , "src/core/ext/transport/chttp2/transport/frame_goaway.h"
+ , "src/core/ext/transport/chttp2/transport/frame_ping.h"
+ , "src/core/ext/transport/chttp2/transport/frame_rst_stream.h"
+ , "src/core/ext/transport/chttp2/transport/frame_settings.h"
+ , "src/core/ext/transport/chttp2/transport/frame_window_update.h"
+ , "src/core/ext/transport/chttp2/transport/hpack_encoder.h"
+ , "src/core/ext/transport/chttp2/transport/hpack_parser.h"
+ , "src/core/ext/transport/chttp2/transport/hpack_table.h"
+ , "src/core/ext/transport/chttp2/transport/http2_settings.h"
+ , "src/core/ext/transport/chttp2/transport/huffsyms.h"
+ , "src/core/ext/transport/chttp2/transport/incoming_metadata.h"
+ , "src/core/ext/transport/chttp2/transport/internal.h"
+ , "src/core/ext/transport/chttp2/transport/stream_map.h"
+ , "src/core/ext/transport/chttp2/transport/varint.h"
+ ]
+ , "deps":
+ [ "gpr_base"
+ , "grpc_base"
+ , "grpc_http_filters"
+ , "grpc_transport_chttp2_alpn"
+ ]
+ }
+, "grpc_transport_chttp2_client_connector":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_transport_chttp2_client_connector"]
+ , "srcs":
+ [ "src/core/ext/transport/chttp2/client/authority.cc"
+ , "src/core/ext/transport/chttp2/client/chttp2_connector.cc"
+ ]
+ , "hdrs":
+ [ "src/core/ext/transport/chttp2/client/authority.h"
+ , "src/core/ext/transport/chttp2/client/chttp2_connector.h"
+ ]
+ , "deps":
+ [ "grpc_base"
+ , "grpc_client_channel"
+ , "grpc_transport_chttp2"
+ ]
+ }
+, "grpc_transport_chttp2_client_insecure":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_transport_chttp2_client_insecure"]
+ , "srcs":
+ [ "src/core/ext/transport/chttp2/client/insecure/channel_create.cc"
+ , "src/core/ext/transport/chttp2/client/insecure/channel_create_posix.cc"
+ ]
+ , "deps":
+ [ "grpc_base"
+ , "grpc_client_channel"
+ , "grpc_transport_chttp2"
+ , "grpc_transport_chttp2_client_connector"
+ ]
+ }
+, "grpc_transport_chttp2_client_secure":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_transport_chttp2_client_secure"]
+ , "srcs":
+ ["src/core/ext/transport/chttp2/client/secure/secure_channel_create.cc"]
+ , "deps":
+ [ "grpc_base"
+ , "grpc_client_channel"
+ , "grpc_secure"
+ , "grpc_transport_chttp2"
+ , "grpc_transport_chttp2_client_connector"
+ ]
+ }
+, "grpc_transport_chttp2_server":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_transport_chttp2_server"]
+ , "srcs": ["src/core/ext/transport/chttp2/server/chttp2_server.cc"]
+ , "hdrs": ["src/core/ext/transport/chttp2/server/chttp2_server.h"]
+ , "deps":
+ [ "grpc_base"
+ , "grpc_transport_chttp2"
+ ]
+ }
+, "grpc_transport_chttp2_server_insecure":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_transport_chttp2_server_insecure"]
+ , "srcs":
+ [ "src/core/ext/transport/chttp2/server/insecure/server_chttp2.cc"
+ , "src/core/ext/transport/chttp2/server/insecure/server_chttp2_posix.cc"
+ ]
+ , "deps":
+ [ "grpc_base"
+ , "grpc_transport_chttp2"
+ , "grpc_transport_chttp2_server"
+ ]
+ }
+, "grpc_transport_chttp2_server_secure":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_transport_chttp2_server_secure"]
+ , "srcs":
+ [ "src/core/ext/transport/chttp2/server/secure/server_secure_chttp2.cc"
+ ]
+ , "deps":
+ [ "grpc_base"
+ , "grpc_secure"
+ , "grpc_transport_chttp2"
+ , "grpc_transport_chttp2_server"
+ ]
+ }
+, "grpc_transport_inproc":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_transport_inproc"]
+ , "srcs":
+ [ "src/core/ext/transport/inproc/inproc_plugin.cc"
+ , "src/core/ext/transport/inproc/inproc_transport.cc"
+ ]
+ , "hdrs": ["src/core/ext/transport/inproc/inproc_transport.h"]
+ , "deps": ["grpc_base"]
+ }
+, "envoy_annotations_upb":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["envoy_annotations_upb"]
+ , "srcs":
+ [ "src/core/ext/upb-generated/envoy/annotations/deprecation.upb.c"
+ , "src/core/ext/upb-generated/envoy/annotations/resource.upb.c"
+ ]
+ , "hdrs":
+ [["./", "src/core/ext/upb-generated", "envoy_annotations_upb_headers"]]
+ , "deps": ["google_api_upb", ["@", "upb", ".", "upb"]]
+ }
+, "udpa_annotations_upb":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["udpa_annotations_upb"]
+ , "pure C": ["YES"]
+ , "srcs":
+ [ "src/core/ext/upb-generated/udpa/annotations/migrate.upb.c"
+ , "src/core/ext/upb-generated/udpa/annotations/sensitive.upb.c"
+ , "src/core/ext/upb-generated/udpa/annotations/status.upb.c"
+ ]
+ , "hdrs":
+ [["./", "src/core/ext/upb-generated", "udpa_annotations_upb_headers"]]
+ , "deps": ["google_api_upb", ["@", "upb", ".", "upb"]]
+ }
+, "envoy_type_upb":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["envoy_type_upb"]
+ , "pure C": ["YES"]
+ , "srcs":
+ [ "src/core/ext/upb-generated/envoy/type/http.upb.c"
+ , "src/core/ext/upb-generated/envoy/type/matcher/regex.upb.c"
+ , "src/core/ext/upb-generated/envoy/type/matcher/string.upb.c"
+ , "src/core/ext/upb-generated/envoy/type/metadata/v2/metadata.upb.c"
+ , "src/core/ext/upb-generated/envoy/type/percent.upb.c"
+ , "src/core/ext/upb-generated/envoy/type/range.upb.c"
+ , "src/core/ext/upb-generated/envoy/type/semantic_version.upb.c"
+ , "src/core/ext/upb-generated/envoy/type/tracing/v2/custom_tag.upb.c"
+ ]
+ , "hdrs":
+ [["./", "src/core/ext/upb-generated", "envoy_type_upb_headers"]]
+ , "deps":
+ [ "envoy_annotations_upb"
+ , "google_api_upb"
+ , "proto_gen_validate_upb"
+ , "udpa_annotations_upb"
+ , ["@", "upb", ".", "upb"]
+ ]
+ }
+, "envoy_core_upb":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["envoy_core_upb"]
+ , "pure C": ["yes"]
+ , "srcs":
+ [ "src/core/ext/upb-generated/envoy/api/v2/core/address.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/core/backoff.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/core/base.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/core/config_source.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/core/event_service_config.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/core/grpc_service.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/core/health_check.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/core/http_uri.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/core/protocol.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/core/socket_option.upb.c"
+ ]
+ , "hdrs":
+ [["./", "src/core/ext/upb-generated", "envoy_core_upb_headers"]]
+ , "deps":
+ [ "envoy_annotations_upb"
+ , "envoy_type_upb"
+ , "google_api_upb"
+ , "proto_gen_validate_upb"
+ , "udpa_annotations_upb"
+ ]
+ }
+, "envoy_ads_upb":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["envoy_ads_upb"]
+ , "pure C": ["yes"]
+ , "srcs":
+ [ "src/core/ext/upb-generated/envoy/api/v2/auth/cert.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/auth/common.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/auth/secret.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/auth/tls.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/cds.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/cluster.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/cluster/circuit_breaker.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/cluster/filter.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/cluster/outlier_detection.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/discovery.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/eds.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/endpoint.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/endpoint/endpoint.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/endpoint/endpoint_components.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/endpoint/load_report.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/lds.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/listener.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/listener/listener.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/listener/listener_components.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/listener/udp_listener_config.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/rds.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/route.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/route/route.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/route/route_components.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/scoped_route.upb.c"
+ , "src/core/ext/upb-generated/envoy/api/v2/srds.upb.c"
+ , "src/core/ext/upb-generated/envoy/config/filter/accesslog/v2/accesslog.upb.c"
+ , "src/core/ext/upb-generated/envoy/config/filter/network/http_connection_manager/v2/http_connection_manager.upb.c"
+ , "src/core/ext/upb-generated/envoy/config/listener/v2/api_listener.upb.c"
+ , "src/core/ext/upb-generated/envoy/config/trace/v2/http_tracer.upb.c"
+ , "src/core/ext/upb-generated/envoy/service/discovery/v2/ads.upb.c"
+ , "src/core/ext/upb-generated/envoy/service/load_stats/v2/lrs.upb.c"
+ ]
+ , "hdrs":
+ [["./", "src/core/ext/upb-generated", "envoy_ads_upb_headers"]]
+ , "deps":
+ [ "envoy_annotations_upb"
+ , "envoy_core_upb"
+ , "envoy_type_upb"
+ , "google_api_upb"
+ , "proto_gen_validate_upb"
+ , "udpa_annotations_upb"
+ , ["@", "upb", ".", "upb"]
+ ]
+ }
+, "grpc_common":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_common"]
+ , "deps":
+ [ "grpc_base"
+ , "census"
+ , "grpc_deadline_filter"
+ , "grpc_client_authority_filter"
+ , "grpc_lb_policy_pick_first"
+ , "grpc_lb_policy_priority"
+ , "grpc_lb_policy_round_robin"
+ , "grpc_lb_policy_weighted_target"
+ , "grpc_client_idle_filter"
+ , "grpc_max_age_filter"
+ , "grpc_message_size_filter"
+ , "grpc_resolver_dns_ares"
+ , "grpc_resolver_fake"
+ , "grpc_resolver_dns_native"
+ , "grpc_resolver_sockaddr"
+ , "grpc_transport_chttp2_client_insecure"
+ , "grpc_transport_chttp2_server_insecure"
+ , "grpc_transport_inproc"
+ , "grpc_workaround_cronet_compression_filter"
+ , "grpc_server_backward_compatibility"
+ ]
+ }
+, "grpc":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc"]
+ , "srcs":
+ [ "src/core/lib/surface/init.cc"
+ , "src/core/plugin_registry/grpc_plugin_registry.cc"
+ ]
+ , "hdrs":
+ [ ["./", "include/grpc", "grpc_public_headers"]
+ , ["./", "include/grpc", "grpc_secure_public_headers"]
+ ]
+ , "deps":
+ [ "grpc_common"
+ , "grpc_lb_policy_cds_secure"
+ , "grpc_lb_policy_eds_secure"
+ , "grpc_lb_policy_grpclb_secure"
+ , "grpc_lb_policy_lrs_secure"
+ , "grpc_lb_policy_xds_routing"
+ , "grpc_resolver_xds_secure"
+ , "grpc_secure"
+ , "grpc_transport_chttp2_client_secure"
+ , "grpc_transport_chttp2_server_secure"
+ ]
+ }
+, "grpc++_sources":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "src/cpp/client/channel_cc.cc"
+ , "src/cpp/client/client_callback.cc"
+ , "src/cpp/client/client_context.cc"
+ , "src/cpp/client/client_interceptor.cc"
+ , "src/cpp/client/create_channel.cc"
+ , "src/cpp/client/create_channel_internal.cc"
+ , "src/cpp/client/create_channel_posix.cc"
+ , "src/cpp/client/credentials_cc.cc"
+ , "src/cpp/common/alarm.cc"
+ , "src/cpp/common/channel_arguments.cc"
+ , "src/cpp/common/channel_filter.cc"
+ , "src/cpp/common/completion_queue_cc.cc"
+ , "src/cpp/common/core_codegen.cc"
+ , "src/cpp/common/resource_quota_cc.cc"
+ , "src/cpp/common/rpc_method.cc"
+ , "src/cpp/common/version_cc.cc"
+ , "src/cpp/common/validate_service_config.cc"
+ , "src/cpp/server/async_generic_service.cc"
+ , "src/cpp/server/channel_argument_option.cc"
+ , "src/cpp/server/create_default_thread_pool.cc"
+ , "src/cpp/server/dynamic_thread_pool.cc"
+ , "src/cpp/server/external_connection_acceptor_impl.cc"
+ , "src/cpp/server/health/default_health_check_service.cc"
+ , "src/cpp/server/health/health_check_service.cc"
+ , "src/cpp/server/health/health_check_service_server_builder_option.cc"
+ , "src/cpp/server/server_builder.cc"
+ , "src/cpp/server/server_callback.cc"
+ , "src/cpp/server/server_cc.cc"
+ , "src/cpp/server/server_context.cc"
+ , "src/cpp/server/server_credentials.cc"
+ , "src/cpp/server/server_posix.cc"
+ , "src/cpp/thread_manager/thread_manager.cc"
+ , "src/cpp/util/byte_buffer_cc.cc"
+ , "src/cpp/util/status.cc"
+ , "src/cpp/util/string_ref.cc"
+ , "src/cpp/util/time_cc.cc"
+ ]
+ }
+, "grpc++_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "src/cpp/client/create_channel_internal.h"
+ , "src/cpp/common/channel_filter.h"
+ , "src/cpp/server/dynamic_thread_pool.h"
+ , "src/cpp/server/external_connection_acceptor_impl.h"
+ , "src/cpp/server/health/default_health_check_service.h"
+ , "src/cpp/server/thread_pool_interface.h"
+ , "src/cpp/thread_manager/thread_manager.h"
+ ]
+ }
+, "grpc++_codegen_base":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc++_codegen_base"]
+ , "hdrs":
+ [ ["./", "include/grpc++", "grpc++_codegen_base_headers"]
+ , ["./", "include/grpcpp", "grpcpp_codegen_base_headers"]
+ ]
+ , "deps":
+ [ ["./", "include/grpcpp", "grpc++_internal_hdrs_only"]
+ , ["./", "include/grpc", "grpc_codegen"]
+ ]
+ }
+, "grpc++_codegen_base_src":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc++_codegen_base_src"]
+ , "srcs": ["src/cpp/codegen/codegen_init.cc"]
+ , "deps": ["grpc++_codegen_base"]
+ }
+, "grpc++_config_proto":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc++_config_proto"]
+ , "hdrs":
+ [ ["./", "include/grpc++", "grpc++_config_proto_headers"]
+ , ["./", "include/grpcpp", "grpcpp_config_proto_headers"]
+ ]
+ , "deps": [["@", "protobuf", ".", "protobuf_headers"]]
+ }
+, "grpc++_codegen_proto":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc++_codegen_proto"]
+ , "hdrs":
+ [ ["./", "include/grpc++", "grpc++_codegen_proto_headers"]
+ , ["./", "include/grpcpp", "grpcpp_codegen_proto_headers"]
+ ]
+ , "deps": ["grpc++_codegen_base", "grpc++_config_proto"]
+ }
+, "grpc++_base":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc++_base"]
+ , "srcs": ["grpc++_sources"]
+ , "hdrs": ["grpc++_public_headers" , "grpc++_headers"]
+ , "deps":
+ [ "grpc"
+ , "grpc++_codegen_base"
+ , "grpc++_codegen_base_src"
+ , "grpc_health_upb"
+ ]
+ }
+, "grpc++":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc++"]
+ , "srcs":
+ [ "src/cpp/client/insecure_credentials.cc"
+ , "src/cpp/client/secure_credentials.cc"
+ , "src/cpp/common/auth_property_iterator.cc"
+ , "src/cpp/common/secure_auth_context.cc"
+ , "src/cpp/common/secure_channel_arguments.cc"
+ , "src/cpp/common/secure_create_auth_context.cc"
+ , "src/cpp/common/tls_credentials_options.cc"
+ , "src/cpp/common/tls_credentials_options_util.cc"
+ , "src/cpp/server/insecure_server_credentials.cc"
+ , "src/cpp/server/secure_server_credentials.cc"
+ ]
+ , "hdrs": ["grpc++_public_headers"]
+ , "private-hdrs":
+ [ "src/cpp/client/secure_credentials.h"
+ , "src/cpp/common/secure_auth_context.h"
+ , "src/cpp/common/tls_credentials_options_util.h"
+ , "src/cpp/server/secure_server_credentials.h"
+ ]
+ , "deps":
+ [ "gpr"
+ , "grpc"
+ , "grpc++_base"
+ , "grpc++_codegen_base"
+ , "grpc++_codegen_base_src"
+ , "grpc++_codegen_proto"
+ , "grpc_secure"
+ ]
+ }
+}
diff --git a/etc/import/TARGETS.gsl b/etc/import/TARGETS.gsl
new file mode 100644
index 00000000..df06ea7e
--- /dev/null
+++ b/etc/import/TARGETS.gsl
@@ -0,0 +1,8 @@
+{ "gsl-lite":
+ {"type": ["@", "rules", "CC", "library"], "hdrs": ["gsl-lite hdrs"]}
+, "gsl-lite hdrs":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "hdrs": ["gsl", "gsl-lite.h", "gsl-lite.hpp", "gsl-lite-vc6.hpp"]
+ , "stage": ["gsl-lite"]
+ }
+}
diff --git a/etc/import/TARGETS.json b/etc/import/TARGETS.json
new file mode 100644
index 00000000..556c7c1d
--- /dev/null
+++ b/etc/import/TARGETS.json
@@ -0,0 +1,45 @@
+{ "json": {"type": ["@", "rules", "CC", "library"], "hdrs": ["json headers"]}
+, "json headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["nlohmann"]
+ , "hdrs":
+ [ "adl_serializer.hpp"
+ , "byte_container_with_subtype.hpp"
+ , "detail/conversions/from_json.hpp"
+ , "detail/conversions/to_chars.hpp"
+ , "detail/conversions/to_json.hpp"
+ , "detail/exceptions.hpp"
+ , "detail/hash.hpp"
+ , "detail/input/binary_reader.hpp"
+ , "detail/input/input_adapters.hpp"
+ , "detail/input/json_sax.hpp"
+ , "detail/input/lexer.hpp"
+ , "detail/input/parser.hpp"
+ , "detail/input/position_t.hpp"
+ , "detail/iterators/internal_iterator.hpp"
+ , "detail/iterators/iteration_proxy.hpp"
+ , "detail/iterators/iterator_traits.hpp"
+ , "detail/iterators/iter_impl.hpp"
+ , "detail/iterators/json_reverse_iterator.hpp"
+ , "detail/iterators/primitive_iterator.hpp"
+ , "detail/json_pointer.hpp"
+ , "detail/json_ref.hpp"
+ , "detail/macro_scope.hpp"
+ , "detail/macro_unscope.hpp"
+ , "detail/meta/cpp_future.hpp"
+ , "detail/meta/detected.hpp"
+ , "detail/meta/is_sax.hpp"
+ , "detail/meta/type_traits.hpp"
+ , "detail/meta/void_t.hpp"
+ , "detail/output/binary_writer.hpp"
+ , "detail/output/output_adapters.hpp"
+ , "detail/output/serializer.hpp"
+ , "detail/value_t.hpp"
+ , "json_fwd.hpp"
+ , "json.hpp"
+ , "ordered_map.hpp"
+ , "thirdparty/hedley/hedley.hpp"
+ , "thirdparty/hedley/hedley_undef.hpp"
+ ]
+ }
+}
diff --git a/etc/import/TARGETS.protobuf b/etc/import/TARGETS.protobuf
new file mode 100644
index 00000000..e4587cef
--- /dev/null
+++ b/etc/import/TARGETS.protobuf
@@ -0,0 +1,37 @@
+{ "protoc":
+ { "type": "export"
+ , "doc":
+ [ "The protobuffer compiler."
+ , ""
+ , "This target typically is used as an implict dependency of"
+ , "the protobuffer rules."
+ ]
+ , "target": ["src/google/protobuf", "protoc"]
+ , "flexible_config":
+ ["OS", "ARCH", "HOST_ARCH", "TARGET_ARCH", "CXX", "AR", "ENV"]
+ }
+, "protoc_lib":
+ { "type": "export"
+ , "doc": []
+ , "target": ["src/google/protobuf", "protoc_lib"]
+ , "flexible_config":
+ ["OS", "ARCH", "HOST_ARCH", "TARGET_ARCH", "CXX", "AR", "ENV"]
+ }
+, "protobuf_headers":
+ { "type": "export"
+ , "doc": [""]
+ , "target": ["src/google/protobuf", "protobuf_headers"]
+ }
+, "C++ runtime":
+ { "type": "export"
+ , "doc": ["The library every generated C++ proto file depends upon"]
+ , "target": ["src/google/protobuf", "protobuf"]
+ , "flexible_config":
+ ["OS", "ARCH", "HOST_ARCH", "TARGET_ARCH", "CXX", "AR", "ENV"]
+ }
+, "well_known_protos":
+ { "type": "export"
+ , "doc": ["Standard well known protobufs"]
+ , "target": ["src/google/protobuf", "well_known_protos"]
+ }
+}
diff --git a/etc/import/TARGETS.re2 b/etc/import/TARGETS.re2
new file mode 100644
index 00000000..34fd2291
--- /dev/null
+++ b/etc/import/TARGETS.re2
@@ -0,0 +1,55 @@
+{ "re2":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["re2"]
+ , "srcs":
+ [ "re2/bitstate.cc"
+ , "re2/compile.cc"
+ , "re2/dfa.cc"
+ , "re2/filtered_re2.cc"
+ , "re2/mimics_pcre.cc"
+ , "re2/nfa.cc"
+ , "re2/onepass.cc"
+ , "re2/parse.cc"
+ , "re2/perl_groups.cc"
+ , "re2/prefilter.cc"
+ , "re2/prefilter_tree.cc"
+ , "re2/prog.cc"
+ , "re2/re2.cc"
+ , "re2/regexp.cc"
+ , "re2/set.cc"
+ , "re2/simplify.cc"
+ , "re2/stringpiece.cc"
+ , "re2/tostring.cc"
+ , "re2/unicode_casefold.cc"
+ , "re2/unicode_groups.cc"
+ , "util/rune.cc"
+ , "util/strutil.cc"
+ ]
+ , "private-hdrs":
+ [ "re2/bitmap256.h"
+ , "re2/pod_array.h"
+ , "re2/prefilter.h"
+ , "re2/prefilter_tree.h"
+ , "re2/prog.h"
+ , "re2/regexp.h"
+ , "re2/sparse_array.h"
+ , "re2/sparse_set.h"
+ , "re2/unicode_casefold.h"
+ , "re2/unicode_groups.h"
+ , "re2/walker-inl.h"
+ , "util/logging.h"
+ , "util/mix.h"
+ , "util/mutex.h"
+ , "util/strutil.h"
+ , "util/utf.h"
+ , "util/util.h"
+ ]
+ , "hdrs":
+ [ "re2/filtered_re2.h"
+ , "re2/re2.h"
+ , "re2/set.h"
+ , "re2/stringpiece.h"
+ ]
+ , "link external": ["-pthread"]
+ }
+}
diff --git a/etc/import/TARGETS.upb b/etc/import/TARGETS.upb
new file mode 100644
index 00000000..bbacf8c9
--- /dev/null
+++ b/etc/import/TARGETS.upb
@@ -0,0 +1,32 @@
+{ "port":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["upb_port"]
+ , "pure C": ["YES"]
+ , "srcs": ["upb/port.c"]
+ , "hdrs":
+ [ "upb/port_def.inc"
+ , "upb/port_undef.inc"
+ ]
+ }
+, "upb":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["upb"]
+ , "pure C": ["YES"]
+ , "srcs":
+ [ "upb/decode.c"
+ , "upb/encode.c"
+ , "upb/msg.c"
+ , "upb/table.c"
+ , "upb/upb.c"
+ ]
+ , "hdrs":
+ [ "upb/decode.h"
+ , "upb/encode.h"
+ , "upb/upb.h"
+ , "upb/upb.hpp"
+ , "upb/msg.h"
+ , "upb/table.int.h"
+ ]
+ , "deps": ["port"]
+ }
+}
diff --git a/etc/import/TARGETS.zlib b/etc/import/TARGETS.zlib
new file mode 100644
index 00000000..2c801300
--- /dev/null
+++ b/etc/import/TARGETS.zlib
@@ -0,0 +1,32 @@
+{ "zlib":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["zlib"]
+ , "pure C": ["YES"]
+ , "srcs":
+ [ "adler32.c"
+ , "compress.c"
+ , "crc32.c"
+ , "deflate.c"
+ , "infback.c"
+ , "inffast.c"
+ , "inflate.c"
+ , "inftrees.c"
+ , "trees.c"
+ , "uncompr.c"
+ , "zutil.c"
+ ]
+ , "hdrs":
+ [ "crc32.h"
+ , "deflate.h"
+ , "gzguts.h"
+ , "inffast.h"
+ , "inffixed.h"
+ , "inflate.h"
+ , "inftrees.h"
+ , "trees.h"
+ , "zconf.h"
+ , "zlib.h"
+ , "zutil.h"
+ ]
+ }
+}
diff --git a/etc/import/absl/algorithm/TARGETS.absl b/etc/import/absl/algorithm/TARGETS.absl
new file mode 100644
index 00000000..0c93afac
--- /dev/null
+++ b/etc/import/absl/algorithm/TARGETS.absl
@@ -0,0 +1,8 @@
+{ "algorithm":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["algorithm"]
+ , "stage": ["absl", "algorithm"]
+ , "hdrs": ["algorithm.h"]
+ , "deps": [["absl/base", "config"]]
+ }
+}
diff --git a/etc/import/absl/base/TARGETS.absl b/etc/import/absl/base/TARGETS.absl
new file mode 100644
index 00000000..e292e389
--- /dev/null
+++ b/etc/import/absl/base/TARGETS.absl
@@ -0,0 +1,148 @@
+{ "config":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["config"]
+ , "stage": ["absl", "base"]
+ , "hdrs":
+ [ "config.h"
+ , "options.h"
+ , "policy_checks.h"
+ ]
+ }
+, "core_headers":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["core_headers"]
+ , "stage": ["absl", "base"]
+ , "hdrs":
+ [ "attributes.h"
+ , "const_init.h"
+ , "macros.h"
+ , "optimization.h"
+ , "port.h"
+ , "thread_annotations.h"
+ , "internal/thread_annotations.h"
+ ]
+ , "deps": ["config"]
+ }
+, "atomic_hook":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["atomic_hook"]
+ , "stage": ["absl", "base"]
+ , "hdrs": ["internal/atomic_hook.h"]
+ , "deps": ["config", "core_headers"]
+ }
+, "dynamic_annotations":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["dynamic_annotations"]
+ , "stage": ["absl", "base"]
+ , "local defines": ["__CLANG_SUPPORT_DYN_ANNOTATION__"]
+ , "srcs": ["dynamic_annotations.cc"]
+ , "hdrs": ["dynamic_annotations.h"]
+ }
+, "log_severity":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["log_severity"]
+ , "stage": ["absl", "base"]
+ , "srcs": ["log_severity.cc"]
+ , "hdrs": ["log_severity.h"]
+ , "deps": ["config", "core_headers"]
+ }
+, "raw_logging_internal":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["raw_logging_internal"]
+ , "stage": ["absl", "base"]
+ , "srcs": ["internal/raw_logging.cc"]
+ , "hdrs": ["internal/raw_logging.h"]
+ , "deps": ["atomic_hook", "config", "core_headers", "log_severity"]
+ }
+, "errno_saver":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["errno_saver"]
+ , "stage": ["absl", "base"]
+ , "hdrs": ["internal/errno_saver.h"]
+ , "deps": ["config"]
+ }
+, "base_internal":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["base_internal"]
+ , "stage": ["absl", "base"]
+ , "hdrs":
+ [ "internal/hide_ptr.h"
+ , "internal/identity.h"
+ , "internal/inline_variable.h"
+ , "internal/invoke.h"
+ , "internal/scheduling_mode.h"
+ ]
+ , "deps": ["config" , ["absl/meta", "type_traits"]]
+ }
+, "spinlock_wait":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["spinlock_wait"]
+ , "stage": ["absl", "base"]
+ , "srcs": ["internal/spinlock_wait.cc"]
+ , "hdrs":
+ [ "internal/spinlock_akaros.inc"
+ , "internal/spinlock_linux.inc"
+ , "internal/spinlock_posix.inc"
+ , "internal/spinlock_wait.h"
+ , "internal/spinlock_win32.inc"
+ ]
+ , "deps": ["base_internal", "core_headers", "errno_saver"]
+ }
+, "endian":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["endian"]
+ , "stage": ["absl", "base"]
+ , "hdrs": ["internal/endian.h", "internal/unaligned_access.h"]
+ , "deps": ["config", "core_headers"]
+ }
+, "bits":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["bits"]
+ , "stage": ["absl", "base"]
+ , "hdrs": ["internal/bits.h"]
+ , "deps": ["config", "core_headers"]
+ }
+, "throw_delegate":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["throw_delegate"]
+ , "stage": ["absl", "base"]
+ , "srcs": ["internal/throw_delegate.cc"]
+ , "hdrs": ["internal/throw_delegate.h"]
+ , "deps": ["config", "raw_logging_internal"]
+ }
+, "base":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["base"]
+ , "stage": ["absl", "base"]
+ , "srcs":
+ [ "internal/cycleclock.cc"
+ , "internal/spinlock.cc"
+ , "internal/sysinfo.cc"
+ , "internal/thread_identity.cc"
+ , "internal/unscaledcycleclock.cc"
+ ]
+ , "hdrs":
+ [ "call_once.h"
+ , "casts.h"
+ , "internal/cycleclock.h"
+ , "internal/low_level_scheduling.h"
+ , "internal/per_thread_tls.h"
+ , "internal/spinlock.h"
+ , "internal/sysinfo.h"
+ , "internal/thread_identity.h"
+ , "internal/tsan_mutex_interface.h"
+ , "internal/unscaledcycleclock.h"
+ ]
+ , "deps":
+ [ "atomic_hook"
+ , "base_internal"
+ , "config"
+ , "core_headers"
+ , "dynamic_annotations"
+ , "log_severity"
+ , "raw_logging_internal"
+ , "spinlock_wait"
+ , ["absl/meta", "type_traits"]
+ ]
+ }
+}
diff --git a/etc/import/absl/container/TARGETS.absl b/etc/import/absl/container/TARGETS.absl
new file mode 100644
index 00000000..ee8d035d
--- /dev/null
+++ b/etc/import/absl/container/TARGETS.absl
@@ -0,0 +1,34 @@
+{ "compressed_tuple":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["compressed_tuple"]
+ , "stage": ["absl", "container"]
+ , "hdrs": ["internal/compressed_tuple.h"]
+ , "deps": [["absl/utility", "utility"]]
+ }
+, "inlined_vector_internal":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["inlined_vector_internal"]
+ , "stage": ["absl", "container"]
+ , "hdrs": ["internal/inlined_vector.h"]
+ , "deps":
+ [ "compressed_tuple"
+ , ["absl/base", "core_headers"]
+ , ["absl/memory", "memory"]
+ , ["absl/meta", "type_traits"]
+ , ["absl/types", "span"]
+ ]
+ }
+, "inlined_vector":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["inlined_vector"]
+ , "stage": ["absl", "container"]
+ , "hdrs": ["inlined_vector.h"]
+ , "deps":
+ [ "inlined_vector_internal"
+ , ["absl/algorithm", "algorithm"]
+ , ["absl/base", "core_headers"]
+ , ["absl/base", "throw_delegate"]
+ , ["absl/memory", "memory"]
+ ]
+ }
+}
diff --git a/etc/import/absl/memory/TARGETS.absl b/etc/import/absl/memory/TARGETS.absl
new file mode 100644
index 00000000..2187f004
--- /dev/null
+++ b/etc/import/absl/memory/TARGETS.absl
@@ -0,0 +1,8 @@
+{ "memory":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["memory"]
+ , "stage": ["absl", "memory"]
+ , "hdrs": ["memory.h"]
+ , "deps": [["absl/base", "core_headers"], ["absl/meta", "type_traits"]]
+ }
+}
diff --git a/etc/import/absl/meta/TARGETS.absl b/etc/import/absl/meta/TARGETS.absl
new file mode 100644
index 00000000..56ed3601
--- /dev/null
+++ b/etc/import/absl/meta/TARGETS.absl
@@ -0,0 +1,8 @@
+{ "type_traits":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["type_traits"]
+ , "stage": ["absl", "meta"]
+ , "hdrs": ["type_traits.h"]
+ , "deps": [["absl/base", "config"]]
+ }
+}
diff --git a/etc/import/absl/numeric/TARGETS.absl b/etc/import/absl/numeric/TARGETS.absl
new file mode 100644
index 00000000..5947d70f
--- /dev/null
+++ b/etc/import/absl/numeric/TARGETS.absl
@@ -0,0 +1,13 @@
+{ "int128":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["int128"]
+ , "stage": ["absl", "numeric"]
+ , "srcs": ["int128.cc"]
+ , "hdrs":
+ [ "int128.h"
+ , "int128_have_intrinsic.inc"
+ , "int128_no_intrinsic.inc"
+ ]
+ , "deps": [["absl/base", "config"], ["absl/base", "core_headers"]]
+ }
+}
diff --git a/etc/import/absl/strings/TARGETS.absl b/etc/import/absl/strings/TARGETS.absl
new file mode 100644
index 00000000..979080be
--- /dev/null
+++ b/etc/import/absl/strings/TARGETS.absl
@@ -0,0 +1,115 @@
+{ "internal":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["strings_internal"]
+ , "stage": ["absl", "strings"]
+ , "srcs":
+ [ "internal/escaping.cc"
+ , "internal/ostringstream.cc"
+ , "internal/utf8.cc"
+ ]
+ , "hdrs":
+ [ "internal/char_map.h"
+ , "internal/escaping.h"
+ , "internal/ostringstream.h"
+ , "internal/resize_uninitialized.h"
+ , "internal/utf8.h"
+ ]
+ , "deps":
+ [ ["absl/base", "config"]
+ , ["absl/base", "core_headers"]
+ , ["absl/base", "endian"]
+ , ["absl/base", "raw_logging_internal"]
+ , ["absl/meta", "type_traits"]
+ ]
+ }
+, "strings":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["strings"]
+ , "stage": ["absl", "strings"]
+ , "srcs":
+ [ "ascii.cc"
+ , "charconv.cc"
+ , "escaping.cc"
+ , "internal/charconv_bigint.cc"
+ , "internal/charconv_parse.cc"
+ , "internal/memutil.cc"
+ , "match.cc"
+ , "numbers.cc"
+ , "str_cat.cc"
+ , "str_replace.cc"
+ , "str_split.cc"
+ , "string_view.cc"
+ , "substitute.cc"
+ ]
+ , "hdrs":
+ [ "ascii.h"
+ , "charconv.h"
+ , "escaping.h"
+ , "internal/memutil.h"
+ , "internal/charconv_bigint.h"
+ , "internal/charconv_parse.h"
+ , "internal/stl_type_traits.h"
+ , "internal/str_join_internal.h"
+ , "internal/str_split_internal.h"
+ , "match.h"
+ , "numbers.h"
+ , "str_cat.h"
+ , "str_join.h"
+ , "str_replace.h"
+ , "str_split.h"
+ , "string_view.h"
+ , "strip.h"
+ , "substitute.h"
+ ]
+ , "deps":
+ [ "internal"
+ , ["absl/base", "base"]
+ , ["absl/base", "bits"]
+ , ["absl/base", "config"]
+ , ["absl/base", "core_headers"]
+ , ["absl/base", "endian"]
+ , ["absl/base", "raw_logging_internal"]
+ , ["absl/base", "throw_delegate"]
+ , ["absl/memory", "memory"]
+ , ["absl/meta", "type_traits"]
+ , ["absl/numeric", "int128"]
+ ]
+ }
+, "str_format_internal":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["str_format_internal"]
+ , "stage": ["absl", "strings"]
+ , "srcs":
+ [ "internal/str_format/arg.cc"
+ , "internal/str_format/bind.cc"
+ , "internal/str_format/extension.cc"
+ , "internal/str_format/float_conversion.cc"
+ , "internal/str_format/output.cc"
+ , "internal/str_format/parser.cc"
+ ]
+ , "hdrs":
+ [ "internal/str_format/arg.h"
+ , "internal/str_format/bind.h"
+ , "internal/str_format/checker.h"
+ , "internal/str_format/extension.h"
+ , "internal/str_format/float_conversion.h"
+ , "internal/str_format/output.h"
+ , "internal/str_format/parser.h"
+ ]
+ , "deps":
+ [ "strings"
+ , ["absl/base", "config"]
+ , ["absl/base", "core_headers"]
+ , ["absl/meta", "type_traits"]
+ , ["absl/numeric", "int128"]
+ , ["absl/types", "span"]
+ ]
+ }
+, "str_format":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["str_format"]
+ , "stage": ["absl", "strings"]
+ , "hdrs": ["str_format.h"]
+ , "deps": ["str_format_internal"]
+ }
+}
diff --git a/etc/import/absl/time/TARGETS.absl b/etc/import/absl/time/TARGETS.absl
new file mode 100644
index 00000000..198e584e
--- /dev/null
+++ b/etc/import/absl/time/TARGETS.absl
@@ -0,0 +1,30 @@
+{ "time":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["time"]
+ , "stage": ["absl", "time"]
+ , "srcs":
+ [ "civil_time.cc"
+ , "clock.cc"
+ , "duration.cc"
+ , "format.cc"
+ , "time.cc"
+ ]
+ , "hdrs":
+ [ "civil_time.h"
+ , "clock.h"
+ , "time.h"
+ , "internal/get_current_time_chrono.inc"
+ , "internal/get_current_time_posix.inc"
+ ]
+ , "deps":
+ [ ["absl/base", "base"]
+ , ["absl/base", "core_headers"]
+ , ["absl/base", "raw_logging_internal"]
+ , ["absl/numeric", "int128"]
+ , ["absl/strings", "strings"]
+ , ["absl/base", "config"]
+ , ["./", "internal/cctz", "civil_time"]
+ , ["./", "internal/cctz", "time_zone"]
+ ]
+ }
+}
diff --git a/etc/import/absl/time/internal/cctz/TARGETS.absl b/etc/import/absl/time/internal/cctz/TARGETS.absl
new file mode 100644
index 00000000..497ac3e6
--- /dev/null
+++ b/etc/import/absl/time/internal/cctz/TARGETS.absl
@@ -0,0 +1,42 @@
+{ "civil_time":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["civil_time"]
+ , "stage": ["absl", "time", "internal", "cctz"]
+ , "srcs": ["src/civil_time_detail.cc"]
+ , "hdrs":
+ [ "include/cctz/civil_time.h"
+ , "include/cctz/civil_time_detail.h"
+ ]
+ , "deps": [["absl/base", "config"]]
+ }
+, "time_zone":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["time_zone"]
+ , "stage": ["absl", "time", "internal", "cctz"]
+ , "srcs":
+ [ "src/time_zone_fixed.cc"
+ , "src/time_zone_format.cc"
+ , "src/time_zone_if.cc"
+ , "src/time_zone_impl.cc"
+ , "src/time_zone_info.cc"
+ , "src/time_zone_libc.cc"
+ , "src/time_zone_lookup.cc"
+ , "src/time_zone_posix.cc"
+ , "src/zone_info_source.cc"
+ ]
+ , "private-hdrs":
+ [ "src/time_zone_fixed.h"
+ , "src/time_zone_if.h"
+ , "src/time_zone_impl.h"
+ , "src/time_zone_info.h"
+ , "src/time_zone_libc.h"
+ , "src/time_zone_posix.h"
+ , "src/tzfile.h"
+ ]
+ , "hdrs":
+ [ "include/cctz/time_zone.h"
+ , "include/cctz/zone_info_source.h"
+ ]
+ , "deps": ["civil_time", ["absl/base", "config"]]
+ }
+}
diff --git a/etc/import/absl/types/TARGETS.absl b/etc/import/absl/types/TARGETS.absl
new file mode 100644
index 00000000..8c777e2c
--- /dev/null
+++ b/etc/import/absl/types/TARGETS.absl
@@ -0,0 +1,39 @@
+{ "span":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["span"]
+ , "stage": ["absl", "types"]
+ , "hdrs": ["internal/span.h", "span.h"]
+ , "deps":
+ [ ["absl/algorithm", "algorithm"]
+ , ["absl/base", "core_headers"]
+ , ["absl/base", "throw_delegate"]
+ , ["absl/meta", "type_traits"]
+ ]
+ }
+, "bad_optional_access":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["bad_optional_access"]
+ , "stage": ["absl", "types"]
+ , "srcs": ["bad_optional_access.cc"]
+ , "hdrs": ["bad_optional_access.h"]
+ , "deps":
+ [ ["absl/base", "config"]
+ , ["absl/base", "raw_logging_internal"]
+ ]
+ }
+, "optional":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["optional"]
+ , "stage": ["absl", "types"]
+ , "hdrs": ["internal/optional.h", "optional.h"]
+ , "deps":
+ [ "bad_optional_access"
+ , ["absl/base", "base_internal"]
+ , ["absl/base", "config"]
+ , ["absl/base", "core_headers"]
+ , ["absl/memory", "memory"]
+ , ["absl/meta", "type_traits"]
+ , ["absl/utility", "utility"]
+ ]
+ }
+}
diff --git a/etc/import/absl/utility/TARGETS.absl b/etc/import/absl/utility/TARGETS.absl
new file mode 100644
index 00000000..65c5ac54
--- /dev/null
+++ b/etc/import/absl/utility/TARGETS.absl
@@ -0,0 +1,12 @@
+{ "utility":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["utility"]
+ , "stage": ["absl", "utility"]
+ , "hdrs": ["utility.h"]
+ , "deps":
+ [ ["absl/base", "base_internal"]
+ , ["absl/base", "config"]
+ , ["absl/meta", "type_traits"]
+ ]
+ }
+}
diff --git a/etc/import/deps/http-parser/TARGETS.git2 b/etc/import/deps/http-parser/TARGETS.git2
new file mode 100644
index 00000000..a2689a60
--- /dev/null
+++ b/etc/import/deps/http-parser/TARGETS.git2
@@ -0,0 +1,8 @@
+{ "git2_http_parser":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["git2_http_parser"]
+ , "pure C": ["yes"]
+ , "srcs": ["http_parser.c"]
+ , "private-hdrs": ["http_parser.h"]
+ }
+}
diff --git a/etc/import/deps/pcre/TARGETS.git2 b/etc/import/deps/pcre/TARGETS.git2
new file mode 100644
index 00000000..badd2a19
--- /dev/null
+++ b/etc/import/deps/pcre/TARGETS.git2
@@ -0,0 +1,248 @@
+{ "config.h":
+ { "type": "file_gen"
+ , "arguments_config": ["OS"]
+ , "name": "config.h"
+ , "data":
+ { "type": "let*"
+ , "bindings":
+ [ ["SUPPORT_PCRE8", 1]
+ , ["PCRE_LINK_SIZE", "2"]
+ , ["PCRE_PARENS_NEST_LIMIT", "250"]
+ , ["PCRE_MATCH_LIMIT", "10000000"]
+ , ["PCRE_MATCH_LIMIT_RECURSION", "MATCH_LIMIT"]
+ , ["PCRE_NEWLINE", "LF"]
+ , ["NO_RECURSE", 1]
+ , ["PCRE_POSIX_MALLOC_THRESHOLD", "10"]
+ , ["BSR_ANYCRLF", 0]
+ ]
+ , "body":
+ { "type": "join"
+ , "separator": "\n"
+ , "$1":
+ { "type": "++"
+ , "$1":
+ [ [ "#define HAVE_STDINT_H 1"
+ , "#define HAVE_INTTYPES_H 1"
+ , "#define HAVE_INTTYPES_H 1"
+ ]
+ , { "type": "case"
+ , "expr": {"type": "var", "name": "OS"}
+ , "case": {"windows": ["#define HAVE_WINDOWS_H 1"]}
+ , "default":
+ [ "#define HAVE_UNISTD_H 1"
+ , "#define HAVE_DIRENT_H 1"
+ , "#define HAVE_SYS_STAT_H 1"
+ , "#define HAVE_SYS_TYPES_H 1"
+ ]
+ }
+ , [ "#define HAVE_MEMMOVE_H 1"
+ , "#define HAVE_STRERROR_H 1"
+ , "#define HAVE_STRTOLL_H 1"
+ ]
+ , { "type": "case"
+ , "expr": {"type": "var", "name": "OS"}
+ , "case": {"windows": ["#define HAVE__STRTOI64_H 1"]}
+ , "default":
+ [ "#define HAVE_BCOPY_H 1"
+ , "#define HAVE_STRTOQ_H 1"
+ ]
+ }
+ , [ "#define HAVE_LONG_LONG 1"
+ , "#define HAVE_UNSIGNED_LONG_LONG 1"
+ ]
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "PCRE_STATIC"}
+ , "then": ["#define PCRE_STATIC 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "SUPPORT_PCRE8"}
+ , "then": ["#define SUPPORT_PCRE8 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "SUPPORT_PCRE16"}
+ , "then": ["#define SUPPORT_PCRE16 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "SUPPORT_PCRE32"}
+ , "then": ["#define SUPPORT_PCRE32 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "SUPPORT_JIT"}
+ , "then": ["#define SUPPORT_JIT 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "SUPPORT_PCREGREP_JIT"}
+ , "then": ["#define SUPPORT_PCREGREP_JIT 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "SUPPORT_UTF"}
+ , "then": ["#define SUPPORT_UTF 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "SUPPORT_UCP"}
+ , "then": ["#define SUPPORT_UCP 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "EBCDIC"}
+ , "then": ["#define EBCDIC 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "EBCDIC_NL25"}
+ , "then": ["#define EBCDIC_NL25 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "BSR_ANYCRLF"}
+ , "then": ["#define BSR_ANYCRLF 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "NO_RECURSE"}
+ , "then": ["#define NO_RECURSE 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "SUPPORT_LIBBZ2"}
+ , "then": ["#define SUPPORT_LIBBZ2 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "SUPPORT_LIBZ"}
+ , "then": ["#define SUPPORT_LIBZ 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "SUPPORT_LIBEDIT"}
+ , "then": ["#define SUPPORT_LIBEDIT 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "SUPPORT_LIBREADLINE"}
+ , "then": ["#define SUPPORT_LIBREADLINE 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "SUPPORT_VALGRIND"}
+ , "then": ["#define SUPPORT_VALGRIND 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "SUPPORT_GCOV"}
+ , "then": ["#define SUPPORT_GCOV 1"]
+ }
+ , { "type": "case"
+ , "expr": {"type": "var", "name": "PCRE_NEWLINE"}
+ , "case":
+ { "LF": ["#define NEWLINE 10"]
+ , "CR": ["#define NEWLINE 13"]
+ , "CRLF": ["#define NEWLINE 3338"]
+ , "ANY": ["#define NEWLINE -1"]
+ , "ANYCRLF": ["#define NEWLINE -2"]
+ }
+ , "default":
+ { "type": "fail"
+ , "msg": "Unsupported value of PCRE_NEWLINE variable"
+ }
+ }
+ , [ { "type": "join"
+ , "$1":
+ [ "#define POSIX_MALLOC_THRESHOLD "
+ , {"type": "var", "name": "PCRE_POSIX_MALLOC_THRESHOLD"}
+ ]
+ }
+ , { "type": "join"
+ , "$1":
+ [ "#define LINK_SIZE "
+ , {"type": "var", "name": "PCRE_LINK_SIZE"}
+ ]
+ }
+ , { "type": "join"
+ , "$1":
+ [ "#define PARENS_NEST_LIMIT "
+ , {"type": "var", "name": "PCRE_PARENS_NEST_LIMIT"}
+ ]
+ }
+ , { "type": "join"
+ , "$1":
+ [ "#define MATCH_LIMIT "
+ , {"type": "var", "name": "PCRE_MATCH_LIMIT"}
+ ]
+ }
+ , { "type": "join"
+ , "$1":
+ [ "#define MATCH_LIMIT_RECURSION "
+ , {"type": "var", "name": "PCRE_MATCH_LIMIT_RECURSION"}
+ ]
+ }
+ , { "type": "join"
+ , "$1":
+ [ "#define PCREGREP_BUFSIZE "
+ , {"type": "var", "name": "PCREGREP_BUFSIZE", "default": ""}
+ ]
+ }
+ ]
+ , [ "#define MAX_NAME_SIZE 32"
+ , "#define MAX_NAME_COUNT 10000"
+ ]
+ ]
+ }
+ }
+ }
+ }
+, "git2_pcreposix_sources":
+ { "type": "install"
+ , "deps": ["pcreposix.c"]
+ }
+, "git2_pcreposix_headers":
+ { "type": "install"
+ , "deps": ["pcreposix.h"]
+ }
+, "git2_pcre_sources":
+ { "type": "install"
+ , "deps":
+ [ "pcre_byte_order.c"
+ , "pcre_chartables.c"
+ , "pcre_compile.c"
+ , "pcre_config.c"
+ , "pcre_dfa_exec.c"
+ , "pcre_exec.c"
+ , "pcre_fullinfo.c"
+ , "pcre_get.c"
+ , "pcre_globals.c"
+ , "pcre_jit_compile.c"
+ , "pcre_maketables.c"
+ , "pcre_newline.c"
+ , "pcre_ord2utf8.c"
+ , "pcre_refcount.c"
+ , "pcre_string_utils.c"
+ , "pcre_study.c"
+ , "pcre_tables.c"
+ , "pcre_ucd.c"
+ , "pcre_valid_utf8.c"
+ , "pcre_version.c"
+ , "pcre_xclass.c"
+ , "git2_pcreposix_sources"
+ ]
+ }
+, "git2_pcre_headers":
+ { "type": "install"
+ , "deps":
+ [ "config.h"
+ , "pcre.h"
+ , "pcre_internal.h"
+ , "ucp.h"
+ , "git2_pcreposix_headers"
+ ]
+ }
+, "git2_pcre":
+ { "type": ["@", "rules", "CC", "library"]
+ , "arguments_config": ["OS"]
+ , "name": ["git2_pcre"]
+ , "pure C": ["yes"]
+ , "local defines":
+ { "type": "++"
+ , "$1":
+ [ ["HAVE_CONFIG_H"]
+ , { "type": "case"
+ , "expr": {"type": "var", "name": "OS"}
+ , "case":
+ {"windows": ["_CRT_SECURE_NO_DEPRECATE", "_CRT_SECURE_NO_WARNINGS"]}
+ }
+ ]
+ }
+ , "srcs": ["git2_pcre_sources"]
+ , "hdrs": ["git2_pcre_headers"]
+ }
+}
diff --git a/etc/import/include/CLI/TARGETS.cli11 b/etc/import/include/CLI/TARGETS.cli11
new file mode 100644
index 00000000..d9764847
--- /dev/null
+++ b/etc/import/include/CLI/TARGETS.cli11
@@ -0,0 +1,22 @@
+{ "cli11_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["CLI"]
+ , "hdrs":
+ [ "Option.hpp"
+ , "Split.hpp"
+ , "Formatter.hpp"
+ , "ConfigFwd.hpp"
+ , "Version.hpp"
+ , "TypeTools.hpp"
+ , "Validators.hpp"
+ , "FormatterFwd.hpp"
+ , "Macros.hpp"
+ , "StringTools.hpp"
+ , "Error.hpp"
+ , "Timer.hpp"
+ , "CLI.hpp"
+ , "App.hpp"
+ , "Config.hpp"
+ ]
+ }
+}
diff --git a/etc/import/include/TARGETS.git2 b/etc/import/include/TARGETS.git2
new file mode 100644
index 00000000..03e54424
--- /dev/null
+++ b/etc/import/include/TARGETS.git2
@@ -0,0 +1,325 @@
+{ "git2/sys/features.h":
+ { "type": "file_gen"
+ , "arguments_config":
+ [ "OS"
+ , "ARCH"
+ , "TARGET_ARCH"
+ , "DEBUG_POOL"
+ , "ENABLE_TRACE"
+ , "THREADSAFE"
+ , "USE_ICONV"
+ , "USE_NSEC"
+ , "REGEX_BACKEND"
+ , "USE_SSH"
+ , "USE_NTLMCLIENT"
+ , "USE_GSSAPI"
+ , "USE_SHA1"
+ , "USE_HTTPS"
+ , "WINHTTP"
+ ]
+ , "name": "git2/sys/features.h"
+ , "data":
+ { "type": "let*"
+ , "bindings":
+ [ [ "THREADSAFE", {"type": "var", "name": "THREADSAFE", "default": true}]
+ , [ "ENABLE_TRACE"
+ , {"type": "var", "name": "ENABLE_TRACE", "default": true}
+ ]
+ , [ "USE_SSH", {"type": "var", "name": "USE_SSH", "default": true}]
+ , [ "WINHTTP"
+ , { "type": "var"
+ , "name": "WINHTTP"
+ , "default":
+ {"type": "==", "$1": {"type": "var", "name": "OS"}, "$2": "windows"}
+ }
+ ]
+ , [ "USE_HTTPS"
+ , { "type": "case*"
+ , "expr": {"type": "var", "name": "USE_HTTPS", "default": true}
+ , "case":
+ [ [ true
+ , { "type": "case*"
+ , "expr": {"type": "var", "name": "OS"}
+ , "case":
+ [ [ "windows"
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "WINHTTP"}
+ , "then": "WinHTTP"
+ , "else": true
+ }
+ ]
+ ]
+ , "default": true
+ }
+ ]
+ ]
+ , "default": {"type": "var", "name": "USE_HTTPS"}
+ }
+ ]
+ , [ "USE_GSSAPI"
+ , { "type": "case*"
+ , "expr": {"type": "var", "name": "USE_GSSAPI", "default": true}
+ , "case":
+ [ [ true
+ , { "type": "case*"
+ , "expr": {"type": "var", "name": "OS"}
+ , "case": [["darwin", "GSS.framework"]]
+ , "default": "gssapi"
+ }
+ ]
+ ]
+ , "default": {"type": "var", "name": "USE_GSSAPI"}
+ }
+ ]
+ , [ "USE_SHA1",
+ { "type": "case*"
+ , "expr": {"type": "var", "name": "USE_SHA1", "default": true}
+ , "case":
+ [ [ true, "CollisionDetection"]
+ , [ "HTTPS",
+ { "type": "case*"
+ , "expr": {"type": "var", "name": "USE_HTTPS"}
+ , "case":
+ [ ["SecureTransport", "CommonCrypto"]
+ , ["WinHTTP", "Win32"]
+ , [false, "CollisionDetection"]
+ , [null, "CollisionDetection"]
+ ]
+ , "default": {"type": "var", "name": "USE_HTTPS"}
+ }
+ ]
+ ]
+ , "default": {"type": "var", "name": "USE_SHA1"}
+ }
+ ]
+ ]
+ , "body":
+ { "type": "join"
+ , "separator": "\n"
+ , "$1":
+ { "type": "++"
+ , "$1":
+ [ [ "#ifndef INCLUDE_features_h__"
+ , "#define INCLUDE_features_h__"
+ , ""
+ ]
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "DEBUG_POOL"}
+ , "then": ["#define GIT_DEBUG_POOL 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "ENABLE_TRACE"}
+ , "then": ["#define GIT_TRACE 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "THREADSAFE"}
+ , "then": ["#define GIT_THREADS 1"]
+ }
+ , { "type": "if"
+ , "cond":
+ { "type": "=="
+ , "$1":
+ { "type": "var"
+ , "name": "TARGET_ARCH"
+ , "default": {"type": "var", "name": "ARCH"}
+ }
+ , "$2": "x86_64"
+ }
+ , "then": ["#define GIT_ARCH_64 1"]
+ , "else": ["#define GIT_ARCH_32 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "USE_ICONV"}
+ , "then": ["#define GIT_USE_ICONV 1"]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "USE_NSEC"}
+ , "then":
+ { "type": "++"
+ , "$1":
+ [ [ "#define GIT_USE_NSEC 1"
+ , "#define GIT_USE_FUTIMENS 1"
+ , "#define GIT_USE_STAT_MTIME_NSEC 1"
+ ]
+ , { "type": "case*"
+ , "expr": {"type": "var", "name": "OS"}
+ , "case":
+ { "darwin": ["#define GIT_USE_STAT_MTIMESPEC 1"]
+ , "bsd": ["#define GIT_USE_STAT_MTIMESPEC 1"]
+ }
+ , "default": ["#define GIT_USE_STAT_MTIM 1"]
+ }
+ ]
+ }
+ }
+ , { "type": "case*"
+ , "expr": {"type": "var", "name": "REGEX_BACKEND"}
+ , "case":
+ [ ["regcomp_l", ["#define GIT_REGEX_REGCOMP_L 1"]]
+ , ["regcomp", ["#define GIT_REGEX_REGCOMP 1"]]
+ , ["pcre", ["#define GIT_REGEX_PCRE 1"]]
+ , ["pcre2", ["#define GIT_REGEX_PCRE2 1"]]
+ , ["builtin", ["#define GIT_REGEX_BUILTIN 1"]]
+ , [null, ["#define GIT_REGEX_BUILTIN 1"]]
+ ]
+ , "default":
+ { "type": "fail"
+ , "msg": "The REGEX_BACKEND option provided is not supported"
+ }
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SSH"}
+ , "then":
+ [ "#define GIT_SSH 1"
+ , "#define GIT_SSH_MEMORY_CREDENTIALS 1"
+ ]
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "USE_NTLMCLIENT"}
+ , "then": ["#define GIT_NTLM 1"]
+ }
+ , { "type": "case*"
+ , "expr": {"type": "var", "name": "USE_GSSAPI"}
+ , "case":
+ [ ["GSS.framework", ["#define GIT_GSSFRAMEWORK 1"]]
+ , ["gssapi", ["#define GIT_GSSAPI 1"]]
+ , [false, []]
+ ]
+ , "default":
+ { "type": "fail"
+ , "msg": "Backend asked for in USE_GSSAPI is not supported"
+ }
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "USE_HTTPS"}
+ , "then":
+ { "type": "++"
+ , "$1":
+ [ ["#define GIT_HTTPS 1"]
+ , { "type": "case*"
+ , "expr": {"type": "var", "name": "USE_HTTPS"}
+ , "case":
+ [ [ "SecureTransport", ["#define GIT_SECURE_TRANSPORT 1"]]
+ , [ "OpenSSL", ["#define GIT_OPENSSL 1"]]
+ , [ "mbedTLS", ["#define GIT_MBEDTLS 1"]]
+ , [ "WinHTTP", ["#define GIT_WINHTTP 1"]]
+ ]
+ , "default":
+ { "type": "fail"
+ , "msg": "Backend asked for in USE_HTTPS is not supported"
+ }
+ }
+ ]
+ }
+ }
+ , { "type": "case*"
+ , "expr": {"type": "var", "name": "USE_SHA1"}
+ , "case":
+ [ ["CollisionDetection", ["#define GIT_SHA1_COLLISIONDETECT 1"]]
+ , ["Win32", ["#define GIT_SHA1_WIN32 1"]]
+ , ["CommonCrypto", ["#define GIT_SHA1_COMMON_CRYPTO 1"]]
+ , ["OpenSSL", ["#define GIT_SHA1_OPENSSL 1"]]
+ , ["mbedTLS", ["#define GIT_SHA1_MBEDTLS 1"]]
+ ]
+ }
+ , ["", "#endif", ""]
+ ]
+ }
+ }
+ }
+ }
+, "git2_public_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "git2.h"
+ , "git2/annotated_commit.h"
+ , "git2/apply.h"
+ , "git2/attr.h"
+ , "git2/blame.h"
+ , "git2/blob.h"
+ , "git2/branch.h"
+ , "git2/buffer.h"
+ , "git2/cert.h"
+ , "git2/checkout.h"
+ , "git2/cherrypick.h"
+ , "git2/clone.h"
+ , "git2/commit.h"
+ , "git2/common.h"
+ , "git2/config.h"
+ , "git2/credential.h"
+ , "git2/credential_helpers.h"
+ , "git2/cred_helpers.h"
+ , "git2/deprecated.h"
+ , "git2/describe.h"
+ , "git2/diff.h"
+ , "git2/errors.h"
+ , "git2/filter.h"
+ , "git2/global.h"
+ , "git2/graph.h"
+ , "git2/ignore.h"
+ , "git2/indexer.h"
+ , "git2/index.h"
+ , "git2/mailmap.h"
+ , "git2/merge.h"
+ , "git2/message.h"
+ , "git2/net.h"
+ , "git2/notes.h"
+ , "git2/object.h"
+ , "git2/odb_backend.h"
+ , "git2/odb.h"
+ , "git2/oidarray.h"
+ , "git2/oid.h"
+ , "git2/pack.h"
+ , "git2/patch.h"
+ , "git2/pathspec.h"
+ , "git2/proxy.h"
+ , "git2/rebase.h"
+ , "git2/refdb.h"
+ , "git2/reflog.h"
+ , "git2/refs.h"
+ , "git2/refspec.h"
+ , "git2/remote.h"
+ , "git2/repository.h"
+ , "git2/reset.h"
+ , "git2/revert.h"
+ , "git2/revparse.h"
+ , "git2/revwalk.h"
+ , "git2/signature.h"
+ , "git2/stash.h"
+ , "git2/status.h"
+ , "git2/stdint.h"
+ , "git2/strarray.h"
+ , "git2/submodule.h"
+ , "git2/tag.h"
+ , "git2/trace.h"
+ , "git2/transaction.h"
+ , "git2/transport.h"
+ , "git2/tree.h"
+ , "git2/types.h"
+ , "git2/version.h"
+ , "git2/worktree.h"
+ , "git2/sys/alloc.h"
+ , "git2/sys/commit.h"
+ , "git2/sys/config.h"
+ , "git2/sys/credential.h"
+ , "git2/sys/cred.h"
+ , "git2/sys/diff.h"
+ , "git2/sys/filter.h"
+ , "git2/sys/hashsig.h"
+ , "git2/sys/index.h"
+ , "git2/sys/mempack.h"
+ , "git2/sys/merge.h"
+ , "git2/sys/odb_backend.h"
+ , "git2/sys/openssl.h"
+ , "git2/sys/path.h"
+ , "git2/sys/refdb_backend.h"
+ , "git2/sys/reflog.h"
+ , "git2/sys/refs.h"
+ , "git2/sys/repository.h"
+ , "git2/sys/stream.h"
+ , "git2/sys/transport.h"
+ , "git2/sys/features.h"
+ ]
+ }
+}
diff --git a/etc/import/include/fmt/TARGETS.fmt b/etc/import/include/fmt/TARGETS.fmt
new file mode 100644
index 00000000..79394153
--- /dev/null
+++ b/etc/import/include/fmt/TARGETS.fmt
@@ -0,0 +1,19 @@
+{ "hdrs":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["fmt"]
+ , "hdrs":
+ [ "chrono.h"
+ , "color.h"
+ , "compile.h"
+ , "core.h"
+ , "format.h"
+ , "format-inl.h"
+ , "locale.h"
+ , "os.h"
+ , "ostream.h"
+ , "posix.h"
+ , "printf.h"
+ , "ranges.h"
+ ]
+ }
+}
diff --git a/etc/import/include/grpc++/TARGETS.grpc b/etc/import/include/grpc++/TARGETS.grpc
new file mode 100644
index 00000000..51508454
--- /dev/null
+++ b/etc/import/include/grpc++/TARGETS.grpc
@@ -0,0 +1,102 @@
+{ "grpc++_public_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["grpc++"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "alarm.h"
+ , "channel.h"
+ , "client_context.h"
+ , "completion_queue.h"
+ , "create_channel.h"
+ , "create_channel_posix.h"
+ , "ext/health_check_service_server_builder_option.h"
+ , "generic/async_generic_service.h"
+ , "generic/generic_stub.h"
+ , "grpc++.h"
+ , "health_check_service_interface.h"
+ , "impl/call.h"
+ , "impl/channel_argument_option.h"
+ , "impl/client_unary_call.h"
+ , "impl/codegen/core_codegen.h"
+ , "impl/grpc_library.h"
+ , "impl/method_handler_impl.h"
+ , "impl/rpc_method.h"
+ , "impl/rpc_service_method.h"
+ , "impl/serialization_traits.h"
+ , "impl/server_builder_option.h"
+ , "impl/server_builder_plugin.h"
+ , "impl/server_initializer.h"
+ , "impl/service_type.h"
+ , "security/auth_context.h"
+ , "resource_quota.h"
+ , "security/auth_metadata_processor.h"
+ , "security/credentials.h"
+ , "security/server_credentials.h"
+ , "server.h"
+ , "server_builder.h"
+ , "server_context.h"
+ , "server_posix.h"
+ , "support/async_stream.h"
+ , "support/async_unary_call.h"
+ , "support/byte_buffer.h"
+ , "support/channel_arguments.h"
+ , "support/config.h"
+ , "support/slice.h"
+ , "support/status.h"
+ , "support/status_code_enum.h"
+ , "support/string_ref.h"
+ , "support/stub_options.h"
+ , "support/sync_stream.h"
+ , "support/time.h"
+ ]
+ }
+, "grpc++_config_proto_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["grpc++"]
+ , "public stage": ["yes"]
+ , "hdrs": ["impl/codegen/config_protobuf.h"]
+ }
+, "grpc++_codegen_base_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["grpc++"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "impl/codegen/async_stream.h"
+ , "impl/codegen/async_unary_call.h"
+ , "impl/codegen/byte_buffer.h"
+ , "impl/codegen/call.h"
+ , "impl/codegen/call_hook.h"
+ , "impl/codegen/channel_interface.h"
+ , "impl/codegen/client_context.h"
+ , "impl/codegen/client_unary_call.h"
+ , "impl/codegen/completion_queue.h"
+ , "impl/codegen/completion_queue_tag.h"
+ , "impl/codegen/config.h"
+ , "impl/codegen/core_codegen_interface.h"
+ , "impl/codegen/create_auth_context.h"
+ , "impl/codegen/grpc_library.h"
+ , "impl/codegen/metadata_map.h"
+ , "impl/codegen/method_handler_impl.h"
+ , "impl/codegen/rpc_method.h"
+ , "impl/codegen/rpc_service_method.h"
+ , "impl/codegen/security/auth_context.h"
+ , "impl/codegen/serialization_traits.h"
+ , "impl/codegen/server_context.h"
+ , "impl/codegen/server_interface.h"
+ , "impl/codegen/service_type.h"
+ , "impl/codegen/slice.h"
+ , "impl/codegen/status.h"
+ , "impl/codegen/status_code_enum.h"
+ , "impl/codegen/string_ref.h"
+ , "impl/codegen/stub_options.h"
+ , "impl/codegen/sync_stream.h"
+ , "impl/codegen/time.h"
+ ]
+ }
+, "grpc++_codegen_proto_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["grpc++"]
+ , "public stage": ["yes"]
+ , "hdrs": ["impl/codegen/proto_utils.h"]
+ }
+}
diff --git a/etc/import/include/grpc/TARGETS.grpc b/etc/import/include/grpc/TARGETS.grpc
new file mode 100644
index 00000000..8b6bb3e8
--- /dev/null
+++ b/etc/import/include/grpc/TARGETS.grpc
@@ -0,0 +1,105 @@
+{ "gpr_public_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["grpc"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "support/alloc.h"
+ , "support/atm.h"
+ , "support/atm_gcc_atomic.h"
+ , "support/atm_gcc_sync.h"
+ , "support/atm_windows.h"
+ , "support/cpu.h"
+ , "support/log.h"
+ , "support/log_windows.h"
+ , "support/port_platform.h"
+ , "support/string_util.h"
+ , "support/sync.h"
+ , "support/sync_abseil.h"
+ , "support/sync_custom.h"
+ , "support/sync_generic.h"
+ , "support/sync_posix.h"
+ , "support/sync_windows.h"
+ , "support/thd_id.h"
+ , "support/time.h"
+ ]
+ }
+, "grpc_public_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["grpc"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "byte_buffer.h"
+ , "byte_buffer_reader.h"
+ , "compression.h"
+ , "fork.h"
+ , "grpc.h"
+ , "grpc_posix.h"
+ , "grpc_security_constants.h"
+ , "slice.h"
+ , "slice_buffer.h"
+ , "status.h"
+ , "load_reporting.h"
+ , "support/workaround_list.h"
+ ]
+ }
+, "gpr_codegen_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["grpc"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "impl/codegen/atm.h"
+ , "impl/codegen/atm_gcc_atomic.h"
+ , "impl/codegen/atm_gcc_sync.h"
+ , "impl/codegen/atm_windows.h"
+ , "impl/codegen/fork.h"
+ , "impl/codegen/gpr_slice.h"
+ , "impl/codegen/gpr_types.h"
+ , "impl/codegen/log.h"
+ , "impl/codegen/port_platform.h"
+ , "impl/codegen/sync.h"
+ , "impl/codegen/sync_abseil.h"
+ , "impl/codegen/sync_custom.h"
+ , "impl/codegen/sync_generic.h"
+ , "impl/codegen/sync_posix.h"
+ , "impl/codegen/sync_windows.h"
+ ]
+ }
+, "gpr_codegen":
+ { "type": ["@", "rules", "CC", "library"]
+ , "hdrs": ["gpr_codegen_headers"]
+ }
+, "grpc_codegen_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["grpc"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "impl/codegen/byte_buffer.h"
+ , "impl/codegen/byte_buffer_reader.h"
+ , "impl/codegen/compression_types.h"
+ , "impl/codegen/connectivity_state.h"
+ , "impl/codegen/grpc_types.h"
+ , "impl/codegen/propagation_bits.h"
+ , "impl/codegen/status.h"
+ , "impl/codegen/slice.h"
+ ]
+ }
+, "grpc_codegen":
+ { "type": ["@", "rules", "CC", "library"]
+ , "hdrs":
+ [ "grpc_codegen_headers"
+ , "gpr_codegen_headers"
+ ]
+ }
+, "grpc_secure_public_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["grpc"]
+ , "public stage": ["yes"]
+ , "hdrs": ["grpc_security.h"]
+ }
+, "census_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["grpc"]
+ , "public stage": ["yes"]
+ , "hdrs": ["census.h"]
+ }
+}
diff --git a/etc/import/include/grpcpp/TARGETS.grpc b/etc/import/include/grpcpp/TARGETS.grpc
new file mode 100644
index 00000000..c42b215f
--- /dev/null
+++ b/etc/import/include/grpcpp/TARGETS.grpc
@@ -0,0 +1,168 @@
+{ "grpcpp_public_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["grpcpp"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "alarm.h"
+ , "alarm_impl.h"
+ , "channel.h"
+ , "channel_impl.h"
+ , "client_context.h"
+ , "completion_queue.h"
+ , "completion_queue_impl.h"
+ , "create_channel.h"
+ , "create_channel_impl.h"
+ , "create_channel_posix.h"
+ , "ext/health_check_service_server_builder_option.h"
+ , "generic/async_generic_service.h"
+ , "generic/generic_stub.h"
+ , "grpcpp.h"
+ , "health_check_service_interface.h"
+ , "impl/call.h"
+ , "impl/channel_argument_option.h"
+ , "impl/client_unary_call.h"
+ , "impl/codegen/core_codegen.h"
+ , "impl/grpc_library.h"
+ , "impl/method_handler_impl.h"
+ , "impl/rpc_method.h"
+ , "impl/rpc_service_method.h"
+ , "impl/serialization_traits.h"
+ , "impl/server_builder_option.h"
+ , "impl/server_builder_option_impl.h"
+ , "impl/server_builder_plugin.h"
+ , "impl/server_initializer.h"
+ , "impl/server_initializer_impl.h"
+ , "impl/service_type.h"
+ , "resource_quota.h"
+ , "security/auth_context.h"
+ , "security/auth_metadata_processor.h"
+ , "security/credentials.h"
+ , "security/credentials_impl.h"
+ , "security/server_credentials.h"
+ , "security/server_credentials_impl.h"
+ , "security/tls_credentials_options.h"
+ , "server.h"
+ , "server_impl.h"
+ , "server_builder.h"
+ , "server_context.h"
+ , "server_posix.h"
+ , "support/async_stream.h"
+ , "support/async_stream_impl.h"
+ , "support/async_unary_call.h"
+ , "support/async_unary_call_impl.h"
+ , "support/byte_buffer.h"
+ , "support/channel_arguments.h"
+ , "support/channel_arguments_impl.h"
+ , "support/client_callback.h"
+ , "support/client_callback_impl.h"
+ , "support/client_interceptor.h"
+ , "support/config.h"
+ , "support/interceptor.h"
+ , "support/message_allocator.h"
+ , "support/method_handler.h"
+ , "support/proto_buffer_reader.h"
+ , "support/proto_buffer_writer.h"
+ , "support/server_callback.h"
+ , "support/server_callback_impl.h"
+ , "support/server_interceptor.h"
+ , "support/slice.h"
+ , "support/status.h"
+ , "support/status_code_enum.h"
+ , "support/string_ref.h"
+ , "support/stub_options.h"
+ , "support/sync_stream.h"
+ , "support/sync_stream_impl.h"
+ , "support/time.h"
+ , "support/validate_service_config.h"
+ ]
+ }
+, "grpcpp_config_proto_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["grpcpp"]
+ , "public stage": ["yes"]
+ , "hdrs": ["impl/codegen/config_protobuf.h"]
+ }
+, "grpcpp_codegen_base_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["grpcpp"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "impl/codegen/async_generic_service.h"
+ , "impl/codegen/async_stream.h"
+ , "impl/codegen/async_stream_impl.h"
+ , "impl/codegen/async_unary_call.h"
+ , "impl/codegen/async_unary_call_impl.h"
+ , "impl/codegen/byte_buffer.h"
+ , "impl/codegen/call.h"
+ , "impl/codegen/call_hook.h"
+ , "impl/codegen/call_op_set.h"
+ , "impl/codegen/call_op_set_interface.h"
+ , "impl/codegen/callback_common.h"
+ , "impl/codegen/channel_interface.h"
+ , "impl/codegen/client_callback.h"
+ , "impl/codegen/client_callback_impl.h"
+ , "impl/codegen/client_context.h"
+ , "impl/codegen/client_context_impl.h"
+ , "impl/codegen/client_interceptor.h"
+ , "impl/codegen/client_unary_call.h"
+ , "impl/codegen/completion_queue.h"
+ , "impl/codegen/completion_queue_impl.h"
+ , "impl/codegen/completion_queue_tag.h"
+ , "impl/codegen/config.h"
+ , "impl/codegen/core_codegen_interface.h"
+ , "impl/codegen/create_auth_context.h"
+ , "impl/codegen/delegating_channel.h"
+ , "impl/codegen/grpc_library.h"
+ , "impl/codegen/intercepted_channel.h"
+ , "impl/codegen/interceptor.h"
+ , "impl/codegen/interceptor_common.h"
+ , "impl/codegen/message_allocator.h"
+ , "impl/codegen/metadata_map.h"
+ , "impl/codegen/method_handler.h"
+ , "impl/codegen/method_handler_impl.h"
+ , "impl/codegen/rpc_method.h"
+ , "impl/codegen/rpc_service_method.h"
+ , "impl/codegen/security/auth_context.h"
+ , "impl/codegen/serialization_traits.h"
+ , "impl/codegen/server_callback.h"
+ , "impl/codegen/server_callback_handlers.h"
+ , "impl/codegen/server_callback_impl.h"
+ , "impl/codegen/server_context.h"
+ , "impl/codegen/server_context_impl.h"
+ , "impl/codegen/server_interceptor.h"
+ , "impl/codegen/server_interface.h"
+ , "impl/codegen/service_type.h"
+ , "impl/codegen/slice.h"
+ , "impl/codegen/status.h"
+ , "impl/codegen/status_code_enum.h"
+ , "impl/codegen/string_ref.h"
+ , "impl/codegen/stub_options.h"
+ , "impl/codegen/sync_stream.h"
+ , "impl/codegen/sync_stream_impl.h"
+ , "impl/codegen/time.h"
+ ]
+ }
+, "grpcpp_codegen_proto_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["grpcpp"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "impl/codegen/proto_buffer_reader.h"
+ , "impl/codegen/proto_buffer_writer.h"
+ , "impl/codegen/proto_utils.h"
+ ]
+ }
+, "grpc++_internal_hdrs_only_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["grpcpp"]
+ , "public stage": ["yes"]
+ , "hdrs": ["impl/codegen/sync.h"]
+ }
+, "grpc++_internal_hdrs_only":
+ { "type": ["@", "rules", "CC", "library"]
+ , "hdrs":
+ [ "grpc++_internal_hdrs_only_headers"
+ , ["./", "../grpc", "gpr_codegen_headers"]
+ ]
+ }
+}
diff --git a/etc/import/libgit2.org b/etc/import/libgit2.org
new file mode 100644
index 00000000..3f0d9a1c
--- /dev/null
+++ b/etc/import/libgit2.org
@@ -0,0 +1,47 @@
+* Options
+ - ~USE_SYSTEM_LIBS~ = true|false
+ - true: link against libs from system's ambient environment
+ - false: link against libs open repository name
+ - ~DEBUG_POOL~ = true|false: Debug pool allocator
+ - ~ENABLE_TRACE~ = true|false: Tracing support
+ - ~THEADSAFE~ = true|false:
+ - ~USE_ICONV~ = true|false:
+ - ~USE_NSEC~ = true|false:
+ - ~USE_NTLMCLIENT~ = true|false:
+ - ~USE_SSH~ = true|false
+ - true: link against libssh2 (from system or open name)
+ - false: no SSH support
+ - ~USE_GSSAPI~ = true|false|"GSS.framework"|"gssapi"
+ - true: use "GSS.framework" for ~OS=="darwin"~ or else "gssapi"
+ - false: Disable GSS
+ - "GSS.framework": Link against GSS framework (system or open name)
+ - "gssapi": Link against libgssapi (system or open name)
+ - ~USE_SHA1~ = true|"CollisionDetection"|"OpenSSL"|"mbedTLS"|"Win32"|"CommonCrypto"|"Generic"|"HTTPS"
+ - true: use "CollisionDetection"
+ - "CollisionDetection": build with shipped SHA1DC implementation
+ - "OpenSSL": link against OpenSSL compat library (from system or open name)
+ - "mbedTLS": link against mbedTLS (from system or open name)
+ - "Win32": link against Windows' SHA1 implementation (always from system)
+ - "CommonCrypto": build with shipped common_crypto implementation
+ - "Generic": build with shipped SHA1 implementation
+ - "HTTPS": inherit from ~USE_HTTPS~, or fall back to "CollisionDetection"
+ - ~WINHTTP~ = true|false: use "WinHTTP" if ~USE_HTTPS==true~
+ - ~USE_HTTPS~ = true|"SecureTransport"|"WinHTTP"|"OpenSSL"|"embedTLS"|false|null
+ - true: use "WinHTTP" if ~WINHTTP~ and ~OS=="windows"~
+ - "SecureTransport": link against Security framework (from system or open name)
+ - "WinHTTP": link against Windows' libwinhttp (always from system)
+ - "OpenSSL": link against OpenSSL (from system or open name)
+ - "mbedTLS": link against mbedTLS (from system or open name)
+ - false|null: Disable HTTPS
+ - ~USE_HTTP_PARSER~ = "system"|<other>
+ - "system": link against libhttp_parser (from system or open name)
+ - <other>: compile and link bundled http_parser (./deps/http-parser)
+ - ~USE_BUNDLED_ZLIB~ = true|false
+ - true: compile and link bundled zlib (./deps/zlib)
+ - false: link against libz (from system or open name)
+ - ~REGEX_BACKEND~ = "regcomp_l"|"pcre"|"pcre2"|"builtin"|null
+ - "regcomp_l": compile against regcomp_l (always from system)
+ - "regcomp": compile against regcomp (always from system)
+ - "pcre": link against libpcre (from system or open name)
+ - "pcre2": link against libpcre2 (from system or open name)
+ - "builtin"|null: compile and link bundled pcre (./deps/pcre)
diff --git a/etc/import/src/TARGETS.git2 b/etc/import/src/TARGETS.git2
new file mode 100644
index 00000000..44cd7ff5
--- /dev/null
+++ b/etc/import/src/TARGETS.git2
@@ -0,0 +1,779 @@
+{ "git2_private_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "alloc.h"
+ , "annotated_commit.h"
+ , "apply.h"
+ , "array.h"
+ , "assert_safe.h"
+ , "attrcache.h"
+ , "attr_file.h"
+ , "attr.h"
+ , "bitvec.h"
+ , "blame_git.h"
+ , "blame.h"
+ , "blob.h"
+ , "branch.h"
+ , "buffer.h"
+ , "buf_text.h"
+ , "cache.h"
+ , "cc-compat.h"
+ , "checkout.h"
+ , "clone.h"
+ , "commit.h"
+ , "commit_list.h"
+ , "common.h"
+ , "config_backend.h"
+ , "config_entries.h"
+ , "config.h"
+ , "config_parse.h"
+ , "delta.h"
+ , "diff_driver.h"
+ , "diff_file.h"
+ , "diff_generate.h"
+ , "diff.h"
+ , "diff_parse.h"
+ , "diff_tform.h"
+ , "diff_xdiff.h"
+ , "errors.h"
+ , "fetch.h"
+ , "fetchhead.h"
+ , "filebuf.h"
+ , "filter.h"
+ , "futils.h"
+ , "global.h"
+ , "hash.h"
+ , "idxmap.h"
+ , "ignore.h"
+ , "indexer.h"
+ , "index.h"
+ , "integer.h"
+ , "iterator.h"
+ , "khash.h"
+ , "mailmap.h"
+ , "map.h"
+ , "merge_driver.h"
+ , "merge.h"
+ , "message.h"
+ , "midx.h"
+ , "mwindow.h"
+ , "net.h"
+ , "netops.h"
+ , "notes.h"
+ , "object.h"
+ , "odb.h"
+ , "offmap.h"
+ , "oidarray.h"
+ , "oid.h"
+ , "oidmap.h"
+ , "pack.h"
+ , "pack-objects.h"
+ , "parse.h"
+ , "patch_generate.h"
+ , "patch.h"
+ , "patch_parse.h"
+ , "path.h"
+ , "pathspec.h"
+ , "pool.h"
+ , "posix.h"
+ , "pqueue.h"
+ , "proxy.h"
+ , "push.h"
+ , "reader.h"
+ , "refdb.h"
+ , "reflog.h"
+ , "refs.h"
+ , "refspec.h"
+ , "regexp.h"
+ , "remote.h"
+ , "repository.h"
+ , "repo_template.h"
+ , "revwalk.h"
+ , "signature.h"
+ , "sortedcache.h"
+ , "status.h"
+ , "stream.h"
+ , "strmap.h"
+ , "strnlen.h"
+ , "submodule.h"
+ , "sysdir.h"
+ , "tag.h"
+ , "thread-utils.h"
+ , "trace.h"
+ , "transaction.h"
+ , "tree-cache.h"
+ , "tree.h"
+ , "userdiff.h"
+ , "util.h"
+ , "varint.h"
+ , "vector.h"
+ , "wildmatch.h"
+ , "worktree.h"
+ , "zstream.h"
+ , "allocators/stdalloc.h"
+ , "allocators/win32_crtdbg.h"
+ , "hash/sha1.h"
+ , "hash/sha1/common_crypto.h"
+ , "hash/sha1/generic.h"
+ , "hash/sha1/mbedtls.h"
+ , "hash/sha1/openssl.h"
+ , "hash/sha1/win32.h"
+ , "streams/mbedtls.h"
+ , "streams/openssl.h"
+ , "streams/registry.h"
+ , "streams/socket.h"
+ , "streams/stransport.h"
+ , "streams/tls.h"
+ , "transports/auth.h"
+ , "transports/auth_negotiate.h"
+ , "transports/auth_ntlm.h"
+ , "transports/httpclient.h"
+ , "transports/http.h"
+ , "transports/smart.h"
+ , "transports/ssh.h"
+ , "unix/posix.h"
+ , "unix/pthread.h"
+ , "win32/dir.h"
+ , "win32/error.h"
+ , "win32/findfile.h"
+ , "win32/mingw-compat.h"
+ , "win32/msvc-compat.h"
+ , "win32/path_w32.h"
+ , "win32/posix.h"
+ , "win32/precompiled.h"
+ , "win32/reparse.h"
+ , "win32/thread.h"
+ , "win32/utf-conv.h"
+ , "win32/version.h"
+ , "win32/w32_buffer.h"
+ , "win32/w32_common.h"
+ , "win32/w32_crtdbg_stacktrace.h"
+ , "win32/w32_stack.h"
+ , "win32/w32_util.h"
+ , "win32/win32-compat.h"
+ , "xdiff/xdiff.h"
+ , "xdiff/xdiffi.h"
+ , "xdiff/xemit.h"
+ , "xdiff/xinclude.h"
+ , "xdiff/xmacros.h"
+ , "xdiff/xprepare.h"
+ , "xdiff/xtypes.h"
+ , "xdiff/xutils.h"
+ , ["include", "git2_public_headers"]
+ ]
+ }
+, "git2_os_unix":
+ { "type": ["@", "rules", "CC", "library"]
+ , "arguments_config": ["OS", "THREADSAFE"]
+ , "name": ["git2_os_unix"]
+ , "pure C": ["yes"]
+ , "srcs":
+ [ "unix/map.c"
+ , "unix/realpath.c"
+ ]
+ , "private-hdrs": ["git2_private_headers"]
+ , "link external":
+ { "type": "++"
+ , "$1":
+ [ { "type": "if"
+ , "cond": {"type": "var", "name": "THREADSAFE", "default": true}
+ , "then": ["-lpthread"]
+ }
+ , { "type": "case"
+ , "expr": {"type": "var", "name": "OS"}
+ , "case":
+ { "solaris": ["-lsocket", "-lnsl"]
+ , "sunos": ["-lsocket", "-lnsl"]
+ , "haiku": ["-lnetwork"]
+ }
+ }
+ ]
+ }
+ }
+, "git2_os_win32":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["git2_os_win32"]
+ , "pure C": ["yes"]
+ , "srcs":
+ [ "win32/dir.c"
+ , "win32/error.c"
+ , "win32/findfile.c"
+ , "win32/map.c"
+ , "win32/path_w32.c"
+ , "win32/posix_w32.c"
+ , "win32/precompiled.c"
+ , "win32/thread.c"
+ , "win32/utf-conv.c"
+ , "win32/w32_buffer.c"
+ , "win32/w32_crtdbg_stacktrace.c"
+ , "win32/w32_stack.c"
+ , "win32/w32_util.c"
+ ]
+ , "private-hdrs": ["git2_private_headers"]
+ , "link external": ["-lws2_32"]
+ }
+, "git2_os":
+ { "type": ["@", "rules", "CC", "library"]
+ , "arguments_config": ["OS"]
+ , "deps":
+ { "type": "if"
+ , "cond":
+ {"type": "==", "$1": {"type": "var", "name": "OS"}, "$2": "windows"}
+ , "then": ["git2_os_win32"]
+ , "else": ["git2_os_unix"]
+ }
+ }
+, "git2_hash_collision_detection":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["git2_hash_collision_detection"]
+ , "pure C": ["yes"]
+ , "local defines":
+ [ "SHA1DC_NO_STANDARD_INCLUDES=1"
+ , "SHA1DC_CUSTOM_INCLUDE_SHA1_C=\"common.h\""
+ , "SHA1DC_CUSTOM_INCLUDE_UBC_CHECK_C=\"common.h\""
+ ]
+ , "srcs":
+ [ "hash/sha1/collisiondetect.c"
+ , "hash/sha1/sha1dc/sha1.c"
+ , "hash/sha1/sha1dc/ubc_check.c"
+ ]
+ , "private-hdrs": ["git2_private_headers"]
+ }
+, "git2_hash_openssl":
+ { "type": ["@", "rules", "CC", "library"]
+ , "arguments_config": ["USE_SYSTEM_LIBS"]
+ , "name": ["git2_hash_openssl"]
+ , "pure C": ["yes"]
+ , "srcs": ["hash/sha1/openssl.c"]
+ , "private-hdrs": ["git2_private_headers"]
+ , "link external":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then": ["-lssl"]
+ }
+ , "deps":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then": []
+ , "else": [["@", "ssl", "", "crypto"]]
+ }
+ }
+, "git2_hash_common_crypto":
+ { "type": ["@", "rules", "CC", "library"]
+ , "arguments_config": ["USE_SYSTEM_LIBS"]
+ , "name": ["git2_hash_common_crypto"]
+ , "pure C": ["yes"]
+ , "srcs": ["hash/sha1/common_crypto.c"]
+ , "private-hdrs": ["git2_private_headers"]
+ , "link external":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then": ["-lcommon_crypto"]
+ }
+ , "deps":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then": []
+ , "else": [["@", "common_crypto", "", "crypto"]]
+ }
+ }
+, "git2_hash_mbedtls":
+ { "type": ["@", "rules", "CC", "library"]
+ , "arguments_config": ["USE_SYSTEM_LIBS"]
+ , "name": ["git2_hash_mbedtls"]
+ , "pure C": ["yes"]
+ , "srcs": ["hash/sha1/mbedtls.c"]
+ , "private-hdrs": ["git2_private_headers"]
+ , "link external":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then": ["-lmbedtls"]
+ }
+ , "deps":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then": []
+ , "else": [["@", "mbedtls", "", "crypto"]]
+ }
+ }
+, "git2_hash_win32":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["git2_hash_win32"]
+ , "pure C": ["yes"]
+ , "srcs": ["hash/sha1/win32.c"]
+ , "private-hdrs": ["git2_private_headers"]
+ }
+, "git2_hash_generic":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["git2_hash_generic"]
+ , "pure C": ["yes"]
+ , "srcs": ["hash/sha1/generic.c"]
+ , "private-hdrs": ["git2_private_headers"]
+ }
+, "git2_hash":
+ { "type": ["@", "rules", "CC", "library"]
+ , "arguments_config": ["USE_SHA1", "USE_HTTPS"]
+ , "deps":
+ { "type": "let*"
+ , "bindings":
+ [ [ "USE_SHA1",
+ { "type": "case*"
+ , "expr": {"type": "var", "name": "USE_SHA1"}
+ , "case":
+ [ [ true, "CollisionDetection"]
+ , [ "HTTPS",
+ { "type": "case*"
+ , "expr": {"type": "var", "name": "USE_HTTPS"}
+ , "case":
+ [ ["SecureTransport", "CommonCrypto"]
+ , ["WinHTTP", "Win32"]
+ , [false, "CollisionDetection"]
+ , [null, "CollisionDetection"]
+ ]
+ , "default": {"type": "var", "name": "USE_HTTPS"}
+ }
+ ]
+ ]
+ , "default": {"type": "var", "name": "USE_SHA1"}
+ }
+ ]
+ ]
+ , "body":
+ { "type": "case*"
+ , "expr": {"type": "var", "name": "USE_SHA1"}
+ , "case":
+ [ ["CollisionDetection", ["git2_hash_collision_detection"]]
+ , ["OpenSSL", ["git2_hash_openssl"]]
+ , ["CommonCrypto", ["git2_hash_common_crypto"]]
+ , ["mbedTLS", ["git2_hash_mbedtls"]]
+ , ["Win32", ["git2_hash_win32"]]
+ , ["Generic", ["git2_hash_generic"]]
+ ]
+ , "default":
+ { "type": "fail"
+ , "msg": "Asked for unknown SHA1 backend in `USE_SHA1`"
+ }
+ }
+ }
+ }
+, "git2_regex":
+ { "type": ["@", "rules", "CC", "library"]
+ , "arguments_config": ["USE_SYSTEM_LIBS", "REGEX_BACKEND"]
+ , "name": ["git2_regex"]
+ , "link external":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then":
+ { "type": "case"
+ , "expr": {"type": "var", "name": "REGEX_BACKEND"}
+ , "case":
+ { "pcre2": ["-lpcre2-8"]
+ , "pcre": ["-lpcre"]
+ }
+ }
+ }
+ , "deps":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then":
+ { "type": "case*"
+ , "expr": {"type": "var", "name": "REGEX_BACKEND"}
+ , "case":
+ [ [ "builtin", [["deps/pcre", "git2_pcre"]]]
+ , [ null, [["deps/pcre", "git2_pcre"]]]
+ ]
+ }
+ , "else":
+ { "type": "case*"
+ , "expr": {"type": "var", "name": "REGEX_BACKEND"}
+ , "case":
+ [ ["pcre2", [["@", "pcre2", "", "pcre2"]]]
+ , ["pcre", [["@", "pcre", "", "pcre"]]]
+ , ["builtin", [["deps/pcre", "git2_pcre"]]]
+ , [null, [["deps/pcre", "git2_pcre"]]]
+ ]
+ }
+ }
+ }
+, "git2_compress":
+ { "type": ["@", "rules", "CC", "library"]
+ , "arguments_config": ["USE_SYSTEM_LIBS", "USE_BUNDLED_ZLIB"]
+ , "name": ["git2_compress"]
+ , "link external":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_BUNDLED_ZLIB"}
+ , "then": []
+ , "else":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then": ["-lz"]
+ }
+ }
+ , "deps":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_BUNDLED_ZLIB"}
+ , "then":
+ { "type": "fail"
+ , "msg": "bundled zlib from deps/zlib not supported yet."
+ }
+ , "else":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then": []
+ , "else": [["@", "zlib", "", "zlib"]]
+ }
+ }
+ }
+, "git2_ssh":
+ { "type": ["@", "rules", "CC", "library"]
+ , "arguments_config": ["USE_SYSTEM_LIBS", "USE_SSH"]
+ , "name": ["git2_ssh"]
+ , "link external":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SSH"}
+ , "then":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then": ["-lssh2"]
+ }
+ }
+ , "deps":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SSH"}
+ , "then":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then": []
+ , "else": [["@", "ssh2", "", "ssh"]]
+ }
+ }
+ }
+, "git2_http_parser":
+ { "type": ["@", "rules", "CC", "library"]
+ , "arguments_config": ["USE_SYSTEM_LIBS", "USE_HTTP_PARSER"]
+ , "name": ["git2_http_parser"]
+ , "link external":
+ { "type": "case*"
+ , "expr": {"type": "var", "name": "USE_HTTP_PARSER"}
+ , "case":
+ [ [ "system"
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then": ["-lhttp_parser"]
+ }
+ ]
+ ]
+ }
+ , "deps":
+ { "type": "case*"
+ , "expr": {"type": "var", "name": "USE_HTTP_PARSER"}
+ , "case":
+ [ [ "system"
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then": []
+ , "else": [["@", "http_parser", "", "http_parser"]]
+ }
+ ]
+ ]
+ , "default": [["deps/http-parser", "git2_http_parser"]]
+ }
+ }
+, "git2_gssapi":
+ { "type": ["@", "rules", "CC", "library"]
+ , "arguments_config": ["OS", "USE_SYSTEM_LIBS", "USE_GSSAPI"]
+ , "name": ["git2_gssapi"]
+ , "link external":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then":
+ { "type": "case*"
+ , "expr": {"type": "var", "name": "USE_GSSAPI"}
+ , "case":
+ [ [ true
+ , { "type": "case*"
+ , "expr": {"type": "var", "name": "OS"}
+ , "case": [["darwin", ["-framework", "GSS"]]]
+ , "default": ["-lgssapi"]
+ }
+ ]
+ , ["gssapi", ["-lgssapi"]]
+ , ["GSS.framework", ["-framwork", "GSS"]]
+ ]
+ }
+ }
+ , "deps":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then": []
+ , "else":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_GSSAPI"}
+ , "then": [["@", "gssapi", "", "gssapi"]]
+ }
+ }
+ }
+, "git2_https":
+ { "type": ["@", "rules", "CC", "library"]
+ , "arguments_config": ["OS", "USE_SYSTEM_LIBS", "WINHTTP", "USE_HTTPS"]
+ , "name": ["git2_http"]
+ , "link external":
+ { "type": "let*"
+ , "bindings":
+ [ [ "WINHTTP"
+ , { "type": "var"
+ , "name": "WINHTTP"
+ , "default":
+ {"type": "==", "$1": {"type": "var", "name": "OS"}, "$2": "windows"}
+ }
+ ]
+ , [ "USE_HTTPS"
+ , { "type": "case*"
+ , "expr": {"type": "var", "name": "USE_HTTPS", "default": true}
+ , "case":
+ [ [ true
+ , { "type": "case*"
+ , "expr": {"type": "var", "name": "OS"}
+ , "case":
+ [ [ "windows"
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "WINHTTP"}
+ , "then": "WinHTTP"
+ , "else": true
+ }
+ ]
+ ]
+ , "default": true
+ }
+ ]
+ ]
+ , "default": {"type": "var", "name": "USE_HTTPS"}
+ }
+ ]
+ ]
+ , "body":
+ { "type": "case*"
+ , "expr": {"type": "var", "name": "USE_HTTPS"}
+ , "case": [["WinHTTP", ["-lwinhttp"]]]
+ , "default":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then":
+ { "type": "case*"
+ , "expr": {"type": "var", "name": "USE_HTTPS"}
+ , "case":
+ [ ["OpenSSL", ["-lssl"]]
+ , ["mbedTLS", ["-lmbedtls"]]
+ , [ "SecureTransport"
+ , ["-framework", "CoreFoundation", "-framework", "Security"]
+ ]
+ ]
+ }
+ }
+ }
+ }
+ , "deps":
+ { "type": "if"
+ , "cond": {"type": "var", "name": "USE_SYSTEM_LIBS"}
+ , "then": []
+ , "else":
+ { "type": "case*"
+ , "expr": {"type": "var", "name": "USE_HTTPS", "default": true}
+ , "case":
+ [ ["OpenSSL", [["@", "ssl", "", "ssl"]]]
+ , ["mbedTLS", [["@", "mbedtls", "", "ssl"]]]
+ , ["SecureTransport", [["@", "secure_transport", "", "ssl"]]]
+ ]
+ }
+ }
+ }
+, "git2internal":
+ { "type": ["@", "rules", "CC", "library"]
+ , "arguments_config": ["OS"]
+ , "name": ["git2"]
+ , "pure C": ["yes"]
+ , "local defines":
+ { "type": "++"
+ , "$1":
+ [ ["HAVE_QSORT_S"]
+ , { "type": "case"
+ , "expr": {"type": "var", "name": "OS"}
+ , "case":
+ { "linux": ["HAVE_QSORT_R_GNU"]
+ , "darwin": ["HAVE_QSORT_R_BSD"]
+ , "bsd": ["HAVE_QSORT_R_BSD"]
+ }
+ }
+ ]
+ }
+ , "srcs":
+ [ "alloc.c"
+ , "annotated_commit.c"
+ , "apply.c"
+ , "attr.c"
+ , "attrcache.c"
+ , "attr_file.c"
+ , "blame.c"
+ , "blame_git.c"
+ , "blob.c"
+ , "branch.c"
+ , "buffer.c"
+ , "buf_text.c"
+ , "cache.c"
+ , "checkout.c"
+ , "cherrypick.c"
+ , "clone.c"
+ , "commit.c"
+ , "commit_list.c"
+ , "config.c"
+ , "config_cache.c"
+ , "config_entries.c"
+ , "config_file.c"
+ , "config_mem.c"
+ , "config_parse.c"
+ , "config_snapshot.c"
+ , "crlf.c"
+ , "date.c"
+ , "delta.c"
+ , "describe.c"
+ , "diff.c"
+ , "diff_driver.c"
+ , "diff_file.c"
+ , "diff_generate.c"
+ , "diff_parse.c"
+ , "diff_print.c"
+ , "diff_stats.c"
+ , "diff_tform.c"
+ , "diff_xdiff.c"
+ , "errors.c"
+ , "fetch.c"
+ , "fetchhead.c"
+ , "filebuf.c"
+ , "filter.c"
+ , "futils.c"
+ , "global.c"
+ , "graph.c"
+ , "hash.c"
+ , "hashsig.c"
+ , "ident.c"
+ , "idxmap.c"
+ , "ignore.c"
+ , "index.c"
+ , "indexer.c"
+ , "iterator.c"
+ , "mailmap.c"
+ , "merge.c"
+ , "merge_driver.c"
+ , "merge_file.c"
+ , "message.c"
+ , "midx.c"
+ , "mwindow.c"
+ , "net.c"
+ , "netops.c"
+ , "notes.c"
+ , "object_api.c"
+ , "object.c"
+ , "odb.c"
+ , "odb_loose.c"
+ , "odb_mempack.c"
+ , "odb_pack.c"
+ , "offmap.c"
+ , "oidarray.c"
+ , "oid.c"
+ , "oidmap.c"
+ , "pack.c"
+ , "pack-objects.c"
+ , "parse.c"
+ , "patch.c"
+ , "patch_generate.c"
+ , "patch_parse.c"
+ , "path.c"
+ , "pathspec.c"
+ , "pool.c"
+ , "posix.c"
+ , "pqueue.c"
+ , "proxy.c"
+ , "push.c"
+ , "reader.c"
+ , "rebase.c"
+ , "refdb.c"
+ , "refdb_fs.c"
+ , "reflog.c"
+ , "refs.c"
+ , "refspec.c"
+ , "regexp.c"
+ , "remote.c"
+ , "repository.c"
+ , "reset.c"
+ , "revert.c"
+ , "revparse.c"
+ , "revwalk.c"
+ , "settings.c"
+ , "signature.c"
+ , "sortedcache.c"
+ , "stash.c"
+ , "status.c"
+ , "strarray.c"
+ , "strmap.c"
+ , "submodule.c"
+ , "sysdir.c"
+ , "tag.c"
+ , "thread-utils.c"
+ , "trace.c"
+ , "trailer.c"
+ , "transaction.c"
+ , "transport.c"
+ , "tree.c"
+ , "tree-cache.c"
+ , "tsort.c"
+ , "util.c"
+ , "varint.c"
+ , "vector.c"
+ , "wildmatch.c"
+ , "worktree.c"
+ , "zstream.c"
+ , "allocators/stdalloc.c"
+ , "allocators/win32_crtdbg.c"
+ , "streams/mbedtls.c"
+ , "streams/openssl.c"
+ , "streams/registry.c"
+ , "streams/socket.c"
+ , "streams/stransport.c"
+ , "streams/tls.c"
+ , "transports/auth.c"
+ , "transports/auth_negotiate.c"
+ , "transports/auth_ntlm.c"
+ , "transports/credential.c"
+ , "transports/credential_helpers.c"
+ , "transports/git.c"
+ , "transports/http.c"
+ , "transports/httpclient.c"
+ , "transports/local.c"
+ , "transports/smart.c"
+ , "transports/smart_pkt.c"
+ , "transports/smart_protocol.c"
+ , "transports/ssh.c"
+ , "transports/winhttp.c"
+ , "xdiff/xdiffi.c"
+ , "xdiff/xemit.c"
+ , "xdiff/xhistogram.c"
+ , "xdiff/xmerge.c"
+ , "xdiff/xpatience.c"
+ , "xdiff/xprepare.c"
+ , "xdiff/xutils.c"
+ ]
+ , "hdrs": [["include", "git2_public_headers"]]
+ , "private-hdrs": ["git2_private_headers"]
+ , "deps":
+ [ "git2_os"
+ , "git2_hash"
+ , "git2_regex"
+ , "git2_compress"
+ , "git2_ssh"
+ , "git2_http_parser"
+ , "git2_gssapi"
+ , "git2_https"
+ ]
+ }
+}
diff --git a/etc/import/src/compiler/TARGETS.grpc b/etc/import/src/compiler/TARGETS.grpc
new file mode 100644
index 00000000..6c57deb8
--- /dev/null
+++ b/etc/import/src/compiler/TARGETS.grpc
@@ -0,0 +1,51 @@
+{ "grpc_plugin_support":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["grpc_plugin_support"]
+ , "stage": ["src", "compiler"]
+ , "srcs":
+ [ "cpp_generator.cc"
+ , "csharp_generator.cc"
+ , "node_generator.cc"
+ , "objective_c_generator.cc"
+ , "php_generator.cc"
+ , "python_generator.cc"
+ , "ruby_generator.cc"
+ ]
+ , "hdrs":
+ [ "config.h"
+ , "config_protobuf.h"
+ , "cpp_generator.h"
+ , "cpp_generator_helpers.h"
+ , "cpp_plugin.h"
+ , "csharp_generator.h"
+ , "csharp_generator_helpers.h"
+ , "generator_helpers.h"
+ , "node_generator.h"
+ , "node_generator_helpers.h"
+ , "objective_c_generator.h"
+ , "objective_c_generator_helpers.h"
+ , "php_generator.h"
+ , "php_generator_helpers.h"
+ , "protobuf_plugin.h"
+ , "python_generator.h"
+ , "python_generator_helpers.h"
+ , "python_private_generator.h"
+ , "ruby_generator.h"
+ , "ruby_generator_helpers-inl.h"
+ , "ruby_generator_map-inl.h"
+ , "ruby_generator_string-inl.h"
+ , "schema_interface.h"
+ ]
+ , "deps":
+ [ [".", "grpc++_config_proto"]
+ , ["@", "protobuf", ".", "protoc_lib"]
+ ]
+ }
+, "grpc_cpp_plugin":
+ { "type": ["@", "rules", "CC", "binary"]
+ , "name": ["grpc_cpp_plugin"]
+ , "srcs": ["cpp_plugin.cc"]
+ , "deps": ["grpc_plugin_support"]
+ , "link external": ["-pthread"]
+ }
+}
diff --git a/etc/import/src/core/ext/upb-generated/TARGETS.grpc b/etc/import/src/core/ext/upb-generated/TARGETS.grpc
new file mode 100644
index 00000000..c9d17ff3
--- /dev/null
+++ b/etc/import/src/core/ext/upb-generated/TARGETS.grpc
@@ -0,0 +1,137 @@
+{ "alts_upb":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["alts_upb"]
+ , "srcs":
+ [ "src/proto/grpc/gcp/altscontext.upb.c"
+ , "src/proto/grpc/gcp/handshaker.upb.c"
+ , "src/proto/grpc/gcp/transport_security_common.upb.c"
+ ]
+ , "hdrs":
+ [ "src/proto/grpc/gcp/altscontext.upb.h"
+ , "src/proto/grpc/gcp/handshaker.upb.h"
+ , "src/proto/grpc/gcp/transport_security_common.upb.h"
+ ]
+ , "deps": [["@", "upb", ".", "upb"]]
+ }
+, "envoy_core_upb_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "envoy/api/v2/core/address.upb.h"
+ , "envoy/api/v2/core/backoff.upb.h"
+ , "envoy/api/v2/core/base.upb.h"
+ , "envoy/api/v2/core/config_source.upb.h"
+ , "envoy/api/v2/core/event_service_config.upb.h"
+ , "envoy/api/v2/core/grpc_service.upb.h"
+ , "envoy/api/v2/core/health_check.upb.h"
+ , "envoy/api/v2/core/http_uri.upb.h"
+ , "envoy/api/v2/core/protocol.upb.h"
+ , "envoy/api/v2/core/socket_option.upb.h"
+ ]
+ }
+, "udpa_annotations_upb_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "udpa/annotations/migrate.upb.h"
+ , "udpa/annotations/sensitive.upb.h"
+ , "udpa/annotations/status.upb.h"
+ ]
+ }
+, "google_api_upb_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "google/api/annotations.upb.h"
+ , "google/api/http.upb.h"
+ , "google/protobuf/any.upb.h"
+ , "google/protobuf/descriptor.upb.h"
+ , "google/protobuf/duration.upb.h"
+ , "google/protobuf/empty.upb.h"
+ , "google/protobuf/struct.upb.h"
+ , "google/protobuf/timestamp.upb.h"
+ , "google/protobuf/wrappers.upb.h"
+ , "google/rpc/status.upb.h"
+ ]
+ }
+, "proto_gen_validate_upb_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "public stage": ["yes"]
+ , "hdrs": ["gogoproto/gogo.upb.h", "validate/validate.upb.h"]
+ }
+, "envoy_annotations_upb_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "envoy/annotations/deprecation.upb.h"
+ , "envoy/annotations/resource.upb.h"
+ ]
+ }
+, "envoy_type_upb_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "envoy/type/http.upb.h"
+ , "envoy/type/matcher/regex.upb.h"
+ , "envoy/type/matcher/string.upb.h"
+ , "envoy/type/metadata/v2/metadata.upb.h"
+ , "envoy/type/percent.upb.h"
+ , "envoy/type/range.upb.h"
+ , "envoy/type/semantic_version.upb.h"
+ , "envoy/type/tracing/v2/custom_tag.upb.h"
+ ]
+ }
+, "envoy_ads_upb_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "envoy/api/v2/auth/cert.upb.h"
+ , "envoy/api/v2/auth/common.upb.h"
+ , "envoy/api/v2/auth/secret.upb.h"
+ , "envoy/api/v2/auth/tls.upb.h"
+ , "envoy/api/v2/cds.upb.h"
+ , "envoy/api/v2/cluster.upb.h"
+ , "envoy/api/v2/cluster/circuit_breaker.upb.h"
+ , "envoy/api/v2/cluster/filter.upb.h"
+ , "envoy/api/v2/cluster/outlier_detection.upb.h"
+ , "envoy/api/v2/discovery.upb.h"
+ , "envoy/api/v2/eds.upb.h"
+ , "envoy/api/v2/endpoint.upb.h"
+ , "envoy/api/v2/endpoint/endpoint.upb.h"
+ , "envoy/api/v2/endpoint/endpoint_components.upb.h"
+ , "envoy/api/v2/endpoint/load_report.upb.h"
+ , "envoy/api/v2/lds.upb.h"
+ , "envoy/api/v2/listener.upb.h"
+ , "envoy/api/v2/listener/listener.upb.h"
+ , "envoy/api/v2/listener/listener_components.upb.h"
+ , "envoy/api/v2/listener/udp_listener_config.upb.h"
+ , "envoy/api/v2/rds.upb.h"
+ , "envoy/api/v2/route.upb.h"
+ , "envoy/api/v2/route/route.upb.h"
+ , "envoy/api/v2/route/route_components.upb.h"
+ , "envoy/api/v2/scoped_route.upb.h"
+ , "envoy/api/v2/srds.upb.h"
+ , "envoy/config/filter/accesslog/v2/accesslog.upb.h"
+ , "envoy/config/filter/network/http_connection_manager/v2/http_connection_manager.upb.h"
+ , "envoy/config/listener/v2/api_listener.upb.h"
+ , "envoy/config/trace/v2/http_tracer.upb.h"
+ , "envoy/service/discovery/v2/ads.upb.h"
+ , "envoy/service/load_stats/v2/lrs.upb.h"
+ ]
+ }
+, "udpa_orca_upb_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "public stage": ["yes"]
+ , "hdrs": ["udpa/data/orca/v1/orca_load_report.upb.h"]
+ }
+, "grpc_health_upb_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "public stage": ["yes"]
+ , "hdrs": ["src/proto/grpc/health/v1/health.upb.h"]
+ }
+, "grpc_lb_upb_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "public stage": ["yes"]
+ , "hdrs": ["src/proto/grpc/lb/v1/load_balancer.upb.h"]
+ }
+}
diff --git a/etc/import/src/google/protobuf/TARGETS.protobuf b/etc/import/src/google/protobuf/TARGETS.protobuf
new file mode 100644
index 00000000..6efd7f94
--- /dev/null
+++ b/etc/import/src/google/protobuf/TARGETS.protobuf
@@ -0,0 +1,451 @@
+{ "protobuf_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["google", "protobuf"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "any.h"
+ , "any.pb.h"
+ , "api.pb.h"
+ , "arena.h"
+ , "arena_impl.h"
+ , "arenastring.h"
+ , "arena_test_util.h"
+ , "compiler/annotation_test_util.h"
+ , "compiler/code_generator.h"
+ , "compiler/command_line_interface.h"
+ , "compiler/cpp/cpp_enum_field.h"
+ , "compiler/cpp/cpp_enum.h"
+ , "compiler/cpp/cpp_extension.h"
+ , "compiler/cpp/cpp_field.h"
+ , "compiler/cpp/cpp_file.h"
+ , "compiler/cpp/cpp_generator.h"
+ , "compiler/cpp/cpp_helpers.h"
+ , "compiler/cpp/cpp_map_field.h"
+ , "compiler/cpp/cpp_message_field.h"
+ , "compiler/cpp/cpp_message.h"
+ , "compiler/cpp/cpp_message_layout_helper.h"
+ , "compiler/cpp/cpp_options.h"
+ , "compiler/cpp/cpp_padding_optimizer.h"
+ , "compiler/cpp/cpp_primitive_field.h"
+ , "compiler/cpp/cpp_service.h"
+ , "compiler/cpp/cpp_string_field.h"
+ , "compiler/cpp/cpp_unittest.h"
+ , "compiler/cpp/cpp_unittest.inc"
+ , "compiler/csharp/csharp_doc_comment.h"
+ , "compiler/csharp/csharp_enum_field.h"
+ , "compiler/csharp/csharp_enum.h"
+ , "compiler/csharp/csharp_field_base.h"
+ , "compiler/csharp/csharp_generator.h"
+ , "compiler/csharp/csharp_helpers.h"
+ , "compiler/csharp/csharp_map_field.h"
+ , "compiler/csharp/csharp_message_field.h"
+ , "compiler/csharp/csharp_message.h"
+ , "compiler/csharp/csharp_names.h"
+ , "compiler/csharp/csharp_options.h"
+ , "compiler/csharp/csharp_primitive_field.h"
+ , "compiler/csharp/csharp_reflection_class.h"
+ , "compiler/csharp/csharp_repeated_enum_field.h"
+ , "compiler/csharp/csharp_repeated_message_field.h"
+ , "compiler/csharp/csharp_repeated_primitive_field.h"
+ , "compiler/csharp/csharp_source_generator_base.h"
+ , "compiler/csharp/csharp_wrapper_field.h"
+ , "compiler/importer.h"
+ , "compiler/java/java_context.h"
+ , "compiler/java/java_doc_comment.h"
+ , "compiler/java/java_enum_field.h"
+ , "compiler/java/java_enum_field_lite.h"
+ , "compiler/java/java_enum.h"
+ , "compiler/java/java_enum_lite.h"
+ , "compiler/java/java_extension.h"
+ , "compiler/java/java_extension_lite.h"
+ , "compiler/java/java_field.h"
+ , "compiler/java/java_file.h"
+ , "compiler/java/java_generator_factory.h"
+ , "compiler/java/java_generator.h"
+ , "compiler/java/java_helpers.h"
+ , "compiler/java/java_map_field.h"
+ , "compiler/java/java_map_field_lite.h"
+ , "compiler/java/java_message_builder.h"
+ , "compiler/java/java_message_builder_lite.h"
+ , "compiler/java/java_message_field.h"
+ , "compiler/java/java_message_field_lite.h"
+ , "compiler/java/java_message.h"
+ , "compiler/java/java_message_lite.h"
+ , "compiler/java/java_name_resolver.h"
+ , "compiler/java/java_names.h"
+ , "compiler/java/java_options.h"
+ , "compiler/java/java_primitive_field.h"
+ , "compiler/java/java_primitive_field_lite.h"
+ , "compiler/java/java_service.h"
+ , "compiler/java/java_shared_code_generator.h"
+ , "compiler/java/java_string_field.h"
+ , "compiler/java/java_string_field_lite.h"
+ , "compiler/js/js_generator.h"
+ , "compiler/js/well_known_types_embed.h"
+ , "compiler/mock_code_generator.h"
+ , "compiler/objectivec/objectivec_enum_field.h"
+ , "compiler/objectivec/objectivec_enum.h"
+ , "compiler/objectivec/objectivec_extension.h"
+ , "compiler/objectivec/objectivec_field.h"
+ , "compiler/objectivec/objectivec_file.h"
+ , "compiler/objectivec/objectivec_generator.h"
+ , "compiler/objectivec/objectivec_helpers.h"
+ , "compiler/objectivec/objectivec_map_field.h"
+ , "compiler/objectivec/objectivec_message_field.h"
+ , "compiler/objectivec/objectivec_message.h"
+ , "compiler/objectivec/objectivec_nsobject_methods.h"
+ , "compiler/objectivec/objectivec_oneof.h"
+ , "compiler/objectivec/objectivec_primitive_field.h"
+ , "compiler/package_info.h"
+ , "compiler/parser.h"
+ , "compiler/php/php_generator.h"
+ , "compiler/plugin.h"
+ , "compiler/plugin.pb.h"
+ , "compiler/python/python_generator.h"
+ , "compiler/ruby/ruby_generator.h"
+ , "compiler/scc.h"
+ , "compiler/subprocess.h"
+ , "compiler/zip_writer.h"
+ , "descriptor_database.h"
+ , "descriptor.h"
+ , "descriptor.pb.h"
+ , "duration.pb.h"
+ , "dynamic_message.h"
+ , "empty.pb.h"
+ , "extension_set.h"
+ , "extension_set_inl.h"
+ , "field_mask.pb.h"
+ , "generated_enum_reflection.h"
+ , "generated_enum_util.h"
+ , "generated_message_reflection.h"
+ , "generated_message_table_driven.h"
+ , "generated_message_table_driven_lite.h"
+ , "generated_message_util.h"
+ , "has_bits.h"
+ , "implicit_weak_message.h"
+ , "inlined_string_field.h"
+ , "io/coded_stream.h"
+ , "io/gzip_stream.h"
+ , "io/io_win32.h"
+ , "io/package_info.h"
+ , "io/printer.h"
+ , "io/strtod.h"
+ , "io/tokenizer.h"
+ , "io/zero_copy_stream.h"
+ , "io/zero_copy_stream_impl.h"
+ , "io/zero_copy_stream_impl_lite.h"
+ , "map_entry.h"
+ , "map_entry_lite.h"
+ , "map_field.h"
+ , "map_field_inl.h"
+ , "map_field_lite.h"
+ , "map.h"
+ , "map_lite_test_util.h"
+ , "map_test_util.h"
+ , "map_test_util_impl.h"
+ , "map_test_util.inc"
+ , "map_type_handler.h"
+ , "message.h"
+ , "message_lite.h"
+ , "message_unittest.inc"
+ , "metadata.h"
+ , "metadata_lite.h"
+ , "package_info.h"
+ , "parse_context.h"
+ , "port_def.inc"
+ , "port.h"
+ , "port_undef.inc"
+ , "proto3_lite_unittest.inc"
+ , "reflection.h"
+ , "reflection_internal.h"
+ , "reflection_ops.h"
+ , "repeated_field.h"
+ , "service.h"
+ , "source_context.pb.h"
+ , "struct.pb.h"
+ , "stubs/bytestream.h"
+ , "stubs/callback.h"
+ , "stubs/casts.h"
+ , "stubs/common.h"
+ , "stubs/fastmem.h"
+ , "stubs/hash.h"
+ , "stubs/int128.h"
+ , "stubs/logging.h"
+ , "stubs/macros.h"
+ , "stubs/map_util.h"
+ , "stubs/mathutil.h"
+ , "stubs/mutex.h"
+ , "stubs/once.h"
+ , "stubs/platform_macros.h"
+ , "stubs/port.h"
+ , "stubs/status.h"
+ , "stubs/status_macros.h"
+ , "stubs/statusor.h"
+ , "stubs/stl_util.h"
+ , "stubs/stringpiece.h"
+ , "stubs/stringprintf.h"
+ , "stubs/strutil.h"
+ , "stubs/substitute.h"
+ , "stubs/template_util.h"
+ , "stubs/time.h"
+ , "testing/file.h"
+ , "testing/googletest.h"
+ , "test_util2.h"
+ , "test_util.h"
+ , "test_util.inc"
+ , "test_util_lite.h"
+ , "text_format.h"
+ , "timestamp.pb.h"
+ , "type.pb.h"
+ , "unknown_field_set.h"
+ , "util/delimited_message_util.h"
+ , "util/field_comparator.h"
+ , "util/field_mask_util.h"
+ , "util/internal/constants.h"
+ , "util/internal/datapiece.h"
+ , "util/internal/default_value_objectwriter.h"
+ , "util/internal/error_listener.h"
+ , "util/internal/expecting_objectwriter.h"
+ , "util/internal/field_mask_utility.h"
+ , "util/internal/json_escaping.h"
+ , "util/internal/json_objectwriter.h"
+ , "util/internal/json_stream_parser.h"
+ , "util/internal/location_tracker.h"
+ , "util/internal/mock_error_listener.h"
+ , "util/internal/object_location_tracker.h"
+ , "util/internal/object_source.h"
+ , "util/internal/object_writer.h"
+ , "util/internal/protostream_objectsource.h"
+ , "util/internal/protostream_objectwriter.h"
+ , "util/internal/proto_writer.h"
+ , "util/internal/structured_objectwriter.h"
+ , "util/internal/type_info.h"
+ , "util/internal/type_info_test_helper.h"
+ , "util/internal/utility.h"
+ , "util/json_util.h"
+ , "util/message_differencer.h"
+ , "util/package_info.h"
+ , "util/time_util.h"
+ , "util/type_resolver.h"
+ , "util/type_resolver_util.h"
+ , "wire_format.h"
+ , "wire_format_lite.h"
+ , "wrappers.pb.h"
+ ]
+ }
+
+, "protobuf_lite":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["protobuf_lite"]
+ , "hdrs": ["protobuf_headers"]
+ , "srcs":
+ [ "any_lite.cc"
+ , "arena.cc"
+ , "extension_set.cc"
+ , "generated_enum_util.cc"
+ , "generated_message_table_driven_lite.cc"
+ , "generated_message_util.cc"
+ , "implicit_weak_message.cc"
+ , "io/coded_stream.cc"
+ , "io/io_win32.cc"
+ , "io/strtod.cc"
+ , "io/zero_copy_stream.cc"
+ , "io/zero_copy_stream_impl.cc"
+ , "io/zero_copy_stream_impl_lite.cc"
+ , "message_lite.cc"
+ , "parse_context.cc"
+ , "repeated_field.cc"
+ , "stubs/bytestream.cc"
+ , "stubs/common.cc"
+ , "stubs/int128.cc"
+ , "stubs/status.cc"
+ , "stubs/statusor.cc"
+ , "stubs/stringpiece.cc"
+ , "stubs/stringprintf.cc"
+ , "stubs/structurally_valid.cc"
+ , "stubs/strutil.cc"
+ , "stubs/time.cc"
+ , "wire_format_lite.cc"
+ ]
+ }
+
+, "protobuf":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["protobuf"]
+ , "srcs":
+ [ "any.cc"
+ , "any.pb.cc"
+ , "api.pb.cc"
+ , "compiler/importer.cc"
+ , "compiler/parser.cc"
+ , "descriptor.cc"
+ , "descriptor.pb.cc"
+ , "descriptor_database.cc"
+ , "duration.pb.cc"
+ , "dynamic_message.cc"
+ , "empty.pb.cc"
+ , "extension_set_heavy.cc"
+ , "field_mask.pb.cc"
+ , "generated_message_reflection.cc"
+ , "generated_message_table_driven.cc"
+ , "io/gzip_stream.cc"
+ , "io/printer.cc"
+ , "io/tokenizer.cc"
+ , "map_field.cc"
+ , "message.cc"
+ , "reflection_ops.cc"
+ , "service.cc"
+ , "source_context.pb.cc"
+ , "struct.pb.cc"
+ , "stubs/substitute.cc"
+ , "text_format.cc"
+ , "timestamp.pb.cc"
+ , "type.pb.cc"
+ , "unknown_field_set.cc"
+ , "util/delimited_message_util.cc"
+ , "util/field_comparator.cc"
+ , "util/field_mask_util.cc"
+ , "util/internal/datapiece.cc"
+ , "util/internal/default_value_objectwriter.cc"
+ , "util/internal/error_listener.cc"
+ , "util/internal/field_mask_utility.cc"
+ , "util/internal/json_escaping.cc"
+ , "util/internal/json_objectwriter.cc"
+ , "util/internal/json_stream_parser.cc"
+ , "util/internal/object_writer.cc"
+ , "util/internal/proto_writer.cc"
+ , "util/internal/protostream_objectsource.cc"
+ , "util/internal/protostream_objectwriter.cc"
+ , "util/internal/type_info.cc"
+ , "util/internal/type_info_test_helper.cc"
+ , "util/internal/utility.cc"
+ , "util/json_util.cc"
+ , "util/message_differencer.cc"
+ , "util/time_util.cc"
+ , "util/type_resolver_util.cc"
+ , "wire_format.cc"
+ , "wrappers.pb.cc"
+ ]
+ , "deps": ["protobuf_lite"]
+ }
+
+, "protoc_lib":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["protoc_lib"]
+ , "srcs":
+ [ "compiler/code_generator.cc"
+ , "compiler/command_line_interface.cc"
+ , "compiler/cpp/cpp_enum.cc"
+ , "compiler/cpp/cpp_enum_field.cc"
+ , "compiler/cpp/cpp_extension.cc"
+ , "compiler/cpp/cpp_field.cc"
+ , "compiler/cpp/cpp_file.cc"
+ , "compiler/cpp/cpp_generator.cc"
+ , "compiler/cpp/cpp_helpers.cc"
+ , "compiler/cpp/cpp_map_field.cc"
+ , "compiler/cpp/cpp_message.cc"
+ , "compiler/cpp/cpp_message_field.cc"
+ , "compiler/cpp/cpp_padding_optimizer.cc"
+ , "compiler/cpp/cpp_primitive_field.cc"
+ , "compiler/cpp/cpp_service.cc"
+ , "compiler/cpp/cpp_string_field.cc"
+ , "compiler/csharp/csharp_doc_comment.cc"
+ , "compiler/csharp/csharp_enum.cc"
+ , "compiler/csharp/csharp_enum_field.cc"
+ , "compiler/csharp/csharp_field_base.cc"
+ , "compiler/csharp/csharp_generator.cc"
+ , "compiler/csharp/csharp_helpers.cc"
+ , "compiler/csharp/csharp_map_field.cc"
+ , "compiler/csharp/csharp_message.cc"
+ , "compiler/csharp/csharp_message_field.cc"
+ , "compiler/csharp/csharp_primitive_field.cc"
+ , "compiler/csharp/csharp_reflection_class.cc"
+ , "compiler/csharp/csharp_repeated_enum_field.cc"
+ , "compiler/csharp/csharp_repeated_message_field.cc"
+ , "compiler/csharp/csharp_repeated_primitive_field.cc"
+ , "compiler/csharp/csharp_source_generator_base.cc"
+ , "compiler/csharp/csharp_wrapper_field.cc"
+ , "compiler/java/java_context.cc"
+ , "compiler/java/java_doc_comment.cc"
+ , "compiler/java/java_enum.cc"
+ , "compiler/java/java_enum_field.cc"
+ , "compiler/java/java_enum_field_lite.cc"
+ , "compiler/java/java_enum_lite.cc"
+ , "compiler/java/java_extension.cc"
+ , "compiler/java/java_extension_lite.cc"
+ , "compiler/java/java_field.cc"
+ , "compiler/java/java_file.cc"
+ , "compiler/java/java_generator.cc"
+ , "compiler/java/java_generator_factory.cc"
+ , "compiler/java/java_helpers.cc"
+ , "compiler/java/java_map_field.cc"
+ , "compiler/java/java_map_field_lite.cc"
+ , "compiler/java/java_message.cc"
+ , "compiler/java/java_message_builder.cc"
+ , "compiler/java/java_message_builder_lite.cc"
+ , "compiler/java/java_message_field.cc"
+ , "compiler/java/java_message_field_lite.cc"
+ , "compiler/java/java_message_lite.cc"
+ , "compiler/java/java_name_resolver.cc"
+ , "compiler/java/java_primitive_field.cc"
+ , "compiler/java/java_primitive_field_lite.cc"
+ , "compiler/java/java_service.cc"
+ , "compiler/java/java_shared_code_generator.cc"
+ , "compiler/java/java_string_field.cc"
+ , "compiler/java/java_string_field_lite.cc"
+ , "compiler/js/js_generator.cc"
+ , "compiler/js/well_known_types_embed.cc"
+ , "compiler/objectivec/objectivec_enum.cc"
+ , "compiler/objectivec/objectivec_enum_field.cc"
+ , "compiler/objectivec/objectivec_extension.cc"
+ , "compiler/objectivec/objectivec_field.cc"
+ , "compiler/objectivec/objectivec_file.cc"
+ , "compiler/objectivec/objectivec_generator.cc"
+ , "compiler/objectivec/objectivec_helpers.cc"
+ , "compiler/objectivec/objectivec_map_field.cc"
+ , "compiler/objectivec/objectivec_message.cc"
+ , "compiler/objectivec/objectivec_message_field.cc"
+ , "compiler/objectivec/objectivec_oneof.cc"
+ , "compiler/objectivec/objectivec_primitive_field.cc"
+ , "compiler/php/php_generator.cc"
+ , "compiler/plugin.cc"
+ , "compiler/plugin.pb.cc"
+ , "compiler/python/python_generator.cc"
+ , "compiler/ruby/ruby_generator.cc"
+ , "compiler/subprocess.cc"
+ , "compiler/zip_writer.cc"
+ ]
+ , "deps": ["protobuf"]
+ }
+
+, "protoc":
+ { "type": ["@", "rules", "CC", "binary"]
+ , "name": ["protoc"]
+ , "srcs": ["compiler/main.cc"]
+ , "deps": ["protoc_lib"]
+ , "link external": ["-pthread"]
+ }
+
+, "well_known_proto_files":
+ { "type": "install"
+ , "deps":
+ [ "any.proto"
+ , "api.proto"
+ , "compiler/plugin.proto"
+ , "descriptor.proto"
+ , "duration.proto"
+ , "empty.proto"
+ , "field_mask.proto"
+ , "source_context.proto"
+ , "struct.proto"
+ , "timestamp.proto"
+ , "type.proto"
+ , "wrappers.proto"
+ ]
+ }
+
+, "well_known_protos":
+ { "type": "install"
+ , "dirs": [["well_known_proto_files", "google/protobuf"]]
+ }
+}
diff --git a/etc/import/src/include/openssl/TARGETS.boringssl b/etc/import/src/include/openssl/TARGETS.boringssl
new file mode 100644
index 00000000..f10ebcce
--- /dev/null
+++ b/etc/import/src/include/openssl/TARGETS.boringssl
@@ -0,0 +1,91 @@
+{ "crypto_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["openssl"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "aead.h"
+ , "aes.h"
+ , "arm_arch.h"
+ , "asn1.h"
+ , "asn1_mac.h"
+ , "asn1t.h"
+ , "base.h"
+ , "base64.h"
+ , "bio.h"
+ , "blowfish.h"
+ , "bn.h"
+ , "buf.h"
+ , "buffer.h"
+ , "bytestring.h"
+ , "cast.h"
+ , "chacha.h"
+ , "cipher.h"
+ , "cmac.h"
+ , "conf.h"
+ , "cpu.h"
+ , "crypto.h"
+ , "curve25519.h"
+ , "des.h"
+ , "dh.h"
+ , "digest.h"
+ , "dsa.h"
+ , "e_os2.h"
+ , "ec.h"
+ , "ec_key.h"
+ , "ecdh.h"
+ , "ecdsa.h"
+ , "engine.h"
+ , "err.h"
+ , "evp.h"
+ , "ex_data.h"
+ , "hkdf.h"
+ , "hmac.h"
+ , "hrss.h"
+ , "is_boringssl.h"
+ , "lhash.h"
+ , "md4.h"
+ , "md5.h"
+ , "mem.h"
+ , "nid.h"
+ , "obj.h"
+ , "obj_mac.h"
+ , "objects.h"
+ , "opensslconf.h"
+ , "opensslv.h"
+ , "ossl_typ.h"
+ , "pem.h"
+ , "pkcs12.h"
+ , "pkcs7.h"
+ , "pkcs8.h"
+ , "poly1305.h"
+ , "pool.h"
+ , "rand.h"
+ , "rc4.h"
+ , "ripemd.h"
+ , "rsa.h"
+ , "safestack.h"
+ , "sha.h"
+ , "siphash.h"
+ , "span.h"
+ , "stack.h"
+ , "thread.h"
+ , "trust_token.h"
+ , "type_check.h"
+ , "x509.h"
+ , "x509_vfy.h"
+ , "x509v3.h"
+ ]
+ }
+, "ssl_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["openssl"]
+ , "public stage": ["yes"]
+ , "hdrs":
+ [ "dtls1.h"
+ , "srtp.h"
+ , "ssl.h"
+ , "ssl3.h"
+ , "tls1.h"
+ ]
+ }
+}
diff --git a/etc/import/third_party/TARGETS.grpc b/etc/import/third_party/TARGETS.grpc
new file mode 100644
index 00000000..754568ae
--- /dev/null
+++ b/etc/import/third_party/TARGETS.grpc
@@ -0,0 +1,115 @@
+{ "ares_build_h":
+ { "type": "install"
+ , "files": {"ares_build.h": "cares/ares_build.h"}
+ }
+, "ares_config_h":
+ { "type": "install"
+ , "arguments_config": ["OS", "ARCH", "TARGET_ARCH"]
+ , "files":
+ { "ares_config.h":
+ { "type": "let*"
+ , "bindings":
+ [ [ "PLATFORM"
+ , { "type": "join"
+ , "separator": "_"
+ , "$1":
+ [ {"type": "var", "name": "OS"}
+ , { "type": "var"
+ , "name": "TARGET_ARCH"
+ , "default": {"type": "var", "name": "ARCH"}
+ }
+ ]
+ }
+ ]
+ ]
+ , "body":
+ { "type": "cond"
+ , "cond":
+ [ [ { "type": "or"
+ , "$1":
+ [ { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "ios_x86_64"
+ }
+ , { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "ios_armv7"
+ }
+ , { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "ios_armv7s"
+ }
+ , { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "ios_arm64"
+ }
+ , { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "tvos_x86_64"
+ }
+ , { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "tvos_arm64"
+ }
+ , { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "watchos_i386"
+ }
+ , { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "watchos_x86_64"
+ }
+ , { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "watchos_armv7k"
+ }
+ , { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "watchos_arm64_32"
+ }
+ , { "type": "=="
+ , "$1": {"type": "var", "name": "PLATFORM"}
+ , "$2": "darwin_x86_64"
+ }
+ , { "type": "=="
+ , "$1": {"type": "var", "name": "OS"}
+ , "$2": "darwin"
+ }
+ ]
+ }
+ , "cares/config_darwin/ares_config.h"
+ ]
+ , [ { "type": "=="
+ , "$1": {"type": "var", "name": "OS"}
+ , "$2": "windows"
+ }
+ , "cares/config_windows/ares_config.h"
+ ]
+ , [ { "type": "=="
+ , "$1": {"type": "var", "name": "OS"}
+ , "$2": "android"
+ }
+ , "cares/config_android/ares_config.h"
+ ]
+ ]
+ , "default": "cares/config_linux/ares_config.h"
+ }
+ }
+ }
+ }
+, "address_sorting":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["address_sorting"]
+ , "stage": ["third_party", "address_sorting"]
+ , "pure C": ["YES"]
+ , "srcs":
+ [ "address_sorting/address_sorting.c"
+ , "address_sorting/address_sorting_posix.c"
+ , "address_sorting/address_sorting_windows.c"
+ ]
+ , "hdrs": ["address_sorting/address_sorting_internal.h"]
+ , "deps":
+ [ ["./", "address_sorting/include/address_sorting", "address_sorting_headers"]
+ ]
+ }
+}
diff --git a/etc/import/third_party/address_sorting/include/address_sorting/TARGETS.grpc b/etc/import/third_party/address_sorting/include/address_sorting/TARGETS.grpc
new file mode 100644
index 00000000..f7b1d386
--- /dev/null
+++ b/etc/import/third_party/address_sorting/include/address_sorting/TARGETS.grpc
@@ -0,0 +1,7 @@
+{ "address_sorting_headers":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "stage": ["address_sorting"]
+ , "public stage": ["yes"]
+ , "hdrs": ["address_sorting.h"]
+ }
+}
diff --git a/etc/repos.json b/etc/repos.json
new file mode 100644
index 00000000..5643b2d7
--- /dev/null
+++ b/etc/repos.json
@@ -0,0 +1,248 @@
+{ "repositories":
+ { "just":
+ { "repository": {"type": "file", "path": "."}
+ , "bindings":
+ { "rules": "rules"
+ , "gsl-lite": "gsl-lite"
+ , "cli11": "cli11"
+ , "json": "json"
+ , "fmt": "fmt"
+ , "ssl": "ssl"
+ , "grpc": "com_github_grpc_grpc"
+ , "googleapis": "google_apis"
+ , "bazel_remote_apis": "bazel_remote_apis"
+ , "libgit2": "com_github_libgit2_libgit2"
+ , "catch2": "catch2"
+ }
+ , "bootstrap": {"link": ["-lgit2", "-lpthread"]}
+ }
+ , "defaults": {"repository": {"type": "file", "path": "etc/defaults"}}
+ , "rules":
+ { "repository": {"type": "file", "path": "rules"}
+ , "target_root": "defaults"
+ , "rule_root": "rules"
+ , "bindings": {"protoc": "protobuf", "grpc": "com_github_grpc_grpc"}
+ }
+ , "rules-nowarn":
+ { "repository": "rules"
+ , "target_root": "defaults"
+ , "rule_root": "rules"
+ , "target_file_name": "TARGETS.nowerror"
+ }
+ , "rules-boringssl":
+ { "repository": "rules"
+ , "target_root": "defaults"
+ , "rule_root": "rules"
+ , "target_file_name": "TARGETS.boringssl"
+ }
+ , "rules-protobuf":
+ { "repository": "rules"
+ , "target_root": "defaults"
+ , "rule_root": "rules"
+ , "target_file_name": "TARGETS.protobuf"
+ , "bindings": {"protoc": "protobuf", "grpc": "com_github_grpc_grpc"}
+ }
+ , "import targets": {"repository": {"type": "file", "path": "etc/import"}}
+ , "gsl-lite":
+ { "repository":
+ { "type": "archive"
+ , "content": "ecbc51f342f7ad97ed4c236f36d2fb2279240d7b"
+ , "distfile": "0.37.0.tar.gz"
+ , "fetch": "https://github.com/gsl-lite/gsl-lite/archive/0.37.0.tar.gz"
+ , "subdir": "gsl-lite-0.37.0/include/gsl"
+ }
+ , "target_root": "import targets"
+ , "target_file_name": "TARGETS.gsl"
+ , "bindings": {"rules": "rules"}
+ }
+ , "cli11":
+ { "repository":
+ { "type": "archive"
+ , "content": "3a4cf02677a25fdb4ca618de211297d001a2e7fd"
+ , "fetch": "https://github.com/CLIUtils/CLI11/archive/v1.9.1.tar.gz"
+ , "subdir": "CLI11-1.9.1/"
+ }
+ , "target_root": "import targets"
+ , "target_file_name": "TARGETS.cli11"
+ , "bindings": {"rules": "rules"}
+ , "bootstrap": {"include_dir": "include/CLI", "include_name": "CLI"}
+ }
+ , "json":
+ { "repository":
+ { "type": "zip"
+ , "content": "eb7ab4ad48f9fb6758cff4a39b76f35abead5881"
+ , "fetch": "https://github.com/nlohmann/json/releases/download/v3.9.1/include.zip"
+ , "subdir": "include/nlohmann"
+ }
+ , "target_root": "import targets"
+ , "target_file_name": "TARGETS.json"
+ , "bindings": {"rules": "rules"}
+ , "bootstrap": {"include_name": "nlohmann"}
+ }
+ , "fmt":
+ { "repository":
+ { "type": "zip"
+ , "content": "939f915e9957eda2194ecf8874149e903c99d071"
+ , "fetch": "https://github.com/fmtlib/fmt/releases/download/7.0.3/fmt-7.0.3.zip"
+ , "subdir": "fmt-7.0.3"
+ }
+ , "target_root": "import targets"
+ , "target_file_name": "TARGETS.fmt"
+ , "bindings": {"rules": "rules-nowarn"}
+ , "bootstrap":
+ { "include_dir": "include/fmt"
+ , "build": "cd src && clang++ -I ../include -c *.cc && ar cqs ../libfmt.a *.o"
+ , "link": ["-lfmt"]
+ }
+ }
+ , "ssl":
+ { "repository":
+ { "type": "archive"
+ , "content": "cdf51ff27d78e1aceb7cc01d03f9a115826501be"
+ , "fetch": "https://github.com/google/boringssl/archive/e8a935e323510419e0b37638716f6df4dcbbe6f6.tar.gz"
+ , "subdir": "boringssl-e8a935e323510419e0b37638716f6df4dcbbe6f6"
+ }
+ , "target_root": "import targets"
+ , "target_file_name": "TARGETS.boringssl"
+ , "bindings": {"rules": "rules-boringssl"}
+ , "bootstrap":
+ { "build": "SYS=`uname -s | tr 'A-Z' 'a-z'` && ARCH=`uname -m` && cc -I . -I src/include -c src/crypto/fipsmodule/*.c $SYS-$ARCH/crypto/fipsmodule/*.S && ar cqs libcryto.a *.o"
+ , "link": ["-lcrypto"]
+ }
+ }
+ , "protobuf":
+ { "repository":
+ { "type": "archive"
+ , "content": "36bbde62df284bec435f1de559094313500ade16"
+ , "fetch": "https://github.com/protocolbuffers/protobuf/archive/678da4f76eb9168c9965afc2149944a66cd48546.tar.gz"
+ , "subdir": "protobuf-678da4f76eb9168c9965afc2149944a66cd48546"
+ }
+ , "target_root": "import targets"
+ , "target_file_name": "TARGETS.protobuf"
+ , "bindings": {"rules": "rules-protobuf"}
+ }
+ , "bazel_remote_apis":
+ { "repository":
+ { "type": "archive"
+ , "content": "b5deb95d544f03f1918cc9d611c7904b8173befa"
+ , "fetch": "https://github.com/bazelbuild/remote-apis/archive/v2.0.0.tar.gz"
+ , "subdir": "remote-apis-2.0.0"
+ }
+ , "target_root": "import targets"
+ , "target_file_name": "TARGETS.bazel_remote_apis"
+ , "bindings":
+ { "rules": "rules-protobuf"
+ , "protoc": "protobuf"
+ , "google_apis": "google_apis"
+ }
+ }
+ , "google_apis":
+ { "repository":
+ { "type": "zip"
+ , "content": "8a1ca602cb7eb42094c853f0640489599962c2cc"
+ , "fetch": "https://github.com/googleapis/googleapis/archive/143084a2624b6591ee1f9d23e7f5241856642f4d.zip"
+ , "subdir": "googleapis-143084a2624b6591ee1f9d23e7f5241856642f4d"
+ }
+ , "target_root": "import targets"
+ , "target_file_name": "TARGETS.google_apis"
+ , "bindings": {"rules": "rules-protobuf", "protoc": "protobuf"}
+ }
+ , "upb":
+ { "repository":
+ { "type": "archive"
+ , "content": "a234f5d2ccff01ee0a36e016b482276c5078905d"
+ , "fetch": "https://github.com/protocolbuffers/upb/archive/92e63da73328d01b417cf26c2de7b0a27a0f83af.tar.gz"
+ , "subdir": "upb-92e63da73328d01b417cf26c2de7b0a27a0f83af"
+ }
+ , "target_root": "import targets"
+ , "target_file_name": "TARGETS.upb"
+ , "bindings": {"rules": "rules"}
+ }
+ , "com_google_absl":
+ { "repository":
+ { "type": "archive"
+ , "content": "d9ba22c59e08577e0986c6d483f33c9fa7b2e104"
+ , "fetch": "https://github.com/abseil/abseil-cpp/archive/df3ea785d8c30a9503321a3d35ee7d35808f190d.tar.gz"
+ , "subdir": "abseil-cpp-df3ea785d8c30a9503321a3d35ee7d35808f190d"
+ }
+ , "target_root": "import targets"
+ , "target_file_name": "TARGETS.absl"
+ , "bindings": {"rules": "rules"}
+ }
+ , "zlib":
+ { "repository":
+ { "type": "archive"
+ , "content": "c47b5e6e3db9dd9f5dfec2ba28428a0444d1c052"
+ , "fetch": "https://github.com/madler/zlib/archive/cacf7f1d4e3d44d871b605da3b647f07d718623f.tar.gz"
+ , "subdir": "zlib-cacf7f1d4e3d44d871b605da3b647f07d718623f"
+ }
+ , "target_root": "import targets"
+ , "target_file_name": "TARGETS.zlib"
+ , "bindings": {"rules": "rules"}
+ }
+ , "re2":
+ { "repository":
+ { "type": "archive"
+ , "content": "987bf33d9e876431f4ab3c630ff08605f58b98a7"
+ , "fetch": "https://github.com/google/re2/archive/aecba11114cf1fac5497aeb844b6966106de3eb6.tar.gz"
+ , "subdir": "re2-aecba11114cf1fac5497aeb844b6966106de3eb6"
+ }
+ , "target_root": "import targets"
+ , "target_file_name": "TARGETS.re2"
+ , "bindings": {"rules": "rules"}
+ }
+ , "com_github_cares_cares":
+ { "repository":
+ { "type": "archive"
+ , "content": "229bb6835455e73a550e4dc44f8ddac22dc34aa7"
+ , "fetch": "https://github.com/c-ares/c-ares/archive/e982924acee7f7313b4baa4ee5ec000c5e373c30.tar.gz"
+ , "subdir": "c-ares-e982924acee7f7313b4baa4ee5ec000c5e373c30"
+ }
+ , "target_root": "import targets"
+ , "target_file_name": "TARGETS.cares"
+ , "bindings": {"rules": "rules", "grpc": "com_github_grpc_grpc"}
+ }
+ , "com_github_grpc_grpc":
+ { "repository":
+ { "type": "archive"
+ , "content": "23f49d3b842f2e916c861d5150e4b7d048084888"
+ , "fetch": "https://github.com/grpc/grpc/archive/v1.31.0.tar.gz"
+ , "subdir": "grpc-1.31.0"
+ }
+ , "target_root": "import targets"
+ , "target_file_name": "TARGETS.grpc"
+ , "bindings":
+ { "rules": "rules-protobuf"
+ , "protobuf": "protobuf"
+ , "libssl": "ssl"
+ , "absl": "com_google_absl"
+ , "upb": "upb"
+ , "zlib": "zlib"
+ , "re2": "re2"
+ , "cares": "com_github_cares_cares"
+ }
+ }
+ , "com_github_libgit2_libgit2":
+ { "repository":
+ { "type": "archive"
+ , "content": "15b9b9ac0236534922b46c301b0f791413ac8bae"
+ , "fetch": "https://github.com/libgit2/libgit2/releases/download/v1.1.0/libgit2-1.1.0.tar.gz"
+ , "subdir": "libgit2-1.1.0"
+ }
+ , "target_root": "import targets"
+ , "target_file_name": "TARGETS.git2"
+ , "bindings": {"rules": "rules", "zlib": "zlib", "ssl": "ssl"}
+ }
+ , "catch2":
+ { "repository":
+ { "type": "archive"
+ , "content": "a154ae9e2daad22c95cff6186b18637d4e5f338c"
+ , "fetch": "https://github.com/catchorg/Catch2/archive/v2.13.1.tar.gz"
+ , "subdir": "Catch2-2.13.1/single_include/catch2"
+ }
+ , "target_root": "import targets"
+ , "target_file_name": "TARGETS.catch2"
+ , "bindings": {"rules": "rules"}
+ }
+ }
+}
diff --git a/rules/CC/EXPRESSIONS b/rules/CC/EXPRESSIONS
new file mode 100644
index 00000000..f1e6592b
--- /dev/null
+++ b/rules/CC/EXPRESSIONS
@@ -0,0 +1,504 @@
+{ "default-CC":
+ { "expression":
+ { "type": "join"
+ , "$1":
+ { "type": "++"
+ , "$1":
+ { "type": "foreach"
+ , "var": "x"
+ , "range": {"type": "FIELD", "name": "defaults"}
+ , "body":
+ { "type": "DEP_PROVIDES"
+ , "provider": "CC"
+ , "dep": {"type": "var", "name": "x"}
+ }
+ }
+ }
+ }
+ }
+, "default-CXX":
+ { "expression":
+ { "type": "join"
+ , "$1":
+ { "type": "++"
+ , "$1":
+ { "type": "foreach"
+ , "var": "x"
+ , "range": {"type": "FIELD", "name": "defaults"}
+ , "body":
+ { "type": "DEP_PROVIDES"
+ , "provider": "CXX"
+ , "dep": {"type": "var", "name": "x"}
+ }
+ }
+ }
+ }
+ }
+, "default-CFLAGS":
+ { "expression":
+ { "type": "++"
+ , "$1":
+ { "type": "foreach"
+ , "var": "x"
+ , "range": {"type": "FIELD", "name": "defaults"}
+ , "body":
+ { "type": "DEP_PROVIDES"
+ , "provider": "CFLAGS"
+ , "dep": {"type": "var", "name": "x"}
+ }
+ }
+ }
+ }
+, "default-CXXFLAGS":
+ { "expression":
+ { "type": "++"
+ , "$1":
+ { "type": "foreach"
+ , "var": "x"
+ , "range": {"type": "FIELD", "name": "defaults"}
+ , "body":
+ { "type": "DEP_PROVIDES"
+ , "provider": "CXXFLAGS"
+ , "dep": {"type": "var", "name": "x"}
+ }
+ }
+ }
+ }
+, "default-ENV":
+ { "expression":
+ { "type": "foreach"
+ , "var": "x"
+ , "range": {"type": "FIELD", "name": "defaults"}
+ , "body":
+ { "type": "DEP_PROVIDES"
+ , "provider": "ENV"
+ , "dep": {"type": "var", "name": "x"}
+ }
+ }
+ }
+, "configure transition":
+ { "expression":
+ { "type": "let*"
+ , "bindings":
+ [ [ "OS"
+ , { "type": "assert_non_empty"
+ , "msg": "Missing field \"os\" for \"configure\"."
+ , "$1": {"type": "join", "$1": {"type": "FIELD", "name": "os"}}
+ }
+ ]
+ , [ "ARCH"
+ , { "type": "assert_non_empty"
+ , "msg": "Missing field \"arch\" for \"configure\"."
+ , "$1": {"type": "join", "$1": {"type": "FIELD", "name": "arch"}}
+ }
+ ]
+ , ["HOST_ARCH", {"type": "var", "name": "ARCH"}]
+ , [ "TARGET_ARCH"
+ , {"type": "join", "$1": {"type": "FIELD", "name": "target_arch"}}
+ ]
+ , [ "TARGET_ARCH"
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "TARGET_ARCH"}
+ , "then": {"type": "var", "name": "TARGET_ARCH"}
+ , "else": {"type": "var", "name": "ARCH"}
+ }
+ ]
+ ]
+ , "body":
+ { "type": "map_union"
+ , "$1":
+ [ { "type": "singleton_map"
+ , "key": "OS"
+ , "value": {"type": "var", "name": "OS"}
+ }
+ , { "type": "singleton_map"
+ , "key": "ARCH"
+ , "value": {"type": "var", "name": "TARGET_ARCH"}
+ }
+ , { "type": "singleton_map"
+ , "key": "HOST_ARCH"
+ , "value": {"type": "var", "name": "HOST_ARCH"}
+ }
+ , { "type": "singleton_map"
+ , "key": "TARGET_ARCH"
+ , "value": {"type": "var", "name": "TARGET_ARCH"}
+ }
+ ]
+ }
+ }
+ }
+, "compile-deps":
+ { "expression":
+ { "type": "disjoint_map_union"
+ , "$1":
+ { "type": "++"
+ , "$1":
+ [ { "type": "foreach"
+ , "var": "dep"
+ , "range": {"type": "FIELD", "name": "deps"}
+ , "body":
+ { "type": "DEP_PROVIDES"
+ , "dep": {"type": "var", "name": "dep"}
+ , "provider": "compile-deps"
+ , "default": {"type": "empty_map"}
+ }
+ }
+ , { "type": "foreach"
+ , "var": "dep"
+ , "range": {"type": "FIELD", "name": "deps"}
+ , "body":
+ {"type": "DEP_RUNFILES", "dep": {"type": "var", "name": "dep"}}
+ }
+ , { "type": "foreach"
+ , "var": "dep"
+ , "range": {"type": "FIELD", "name": "proto-deps"}
+ , "body":
+ { "type": "DEP_PROVIDES"
+ , "dep": {"type": "var", "name": "dep"}
+ , "provider": "compile-deps"
+ , "default": {"type": "empty_map"}
+ }
+ }
+ , { "type": "foreach"
+ , "var": "dep"
+ , "range": {"type": "FIELD", "name": "proto-deps"}
+ , "body":
+ {"type": "DEP_RUNFILES", "dep": {"type": "var", "name": "dep"}}
+ }
+ ]
+ }
+ }
+ }
+, "link-deps":
+ { "expression":
+ { "type": "disjoint_map_union"
+ , "$1":
+ { "type": "++"
+ , "$1":
+ [ { "type": "foreach"
+ , "var": "dep"
+ , "range": {"type": "FIELD", "name": "deps"}
+ , "body":
+ { "type": "DEP_PROVIDES"
+ , "dep": {"type": "var", "name": "dep"}
+ , "provider": "link-deps"
+ , "default": {"type": "empty_map"}
+ }
+ }
+ , { "type": "foreach"
+ , "var": "dep"
+ , "range": {"type": "FIELD", "name": "deps"}
+ , "body":
+ {"type": "DEP_ARTIFACTS", "dep": {"type": "var", "name": "dep"}}
+ }
+ , { "type": "foreach"
+ , "var": "dep"
+ , "range": {"type": "FIELD", "name": "proto-deps"}
+ , "body":
+ { "type": "DEP_PROVIDES"
+ , "dep": {"type": "var", "name": "dep"}
+ , "provider": "link-deps"
+ , "default": {"type": "empty_map"}
+ }
+ }
+ , { "type": "foreach"
+ , "var": "dep"
+ , "range": {"type": "FIELD", "name": "proto-deps"}
+ , "body":
+ {"type": "DEP_ARTIFACTS", "dep": {"type": "var", "name": "dep"}}
+ }
+ ]
+ }
+ }
+ }
+, "objects":
+ { "vars": ["CXX", "CXXFLAGS", "ENV", "srcs", "compile-deps", "local hdrs"]
+ , "expression":
+ { "type": "let*"
+ , "bindings":
+ [ [ "include tree"
+ , { "type": "singleton_map"
+ , "key": "include"
+ , "value":
+ {"type": "TREE", "$1": {"type": "var", "name": "compile-deps"}}
+ }
+ ]
+ , [ "all hdrs"
+ , { "type": "map_union"
+ , "$1":
+ [ {"type": "var", "name": "include tree"}
+ , { "type": "to_subdir"
+ , "subdir": "work"
+ , "$1": {"type": "var", "name": "local hdrs"}
+ }
+ ]
+ }
+ ]
+ ]
+ , "body":
+ { "type": "map_union"
+ , "$1":
+ { "type": "foreach_map"
+ , "var_key": "src_name"
+ , "var_val": "src_val"
+ , "range": {"type": "var", "name": "srcs"}
+ , "body":
+ { "type": "let*"
+ , "bindings":
+ [ [ "work src_name"
+ , { "type": "join"
+ , "separator": "/"
+ , "$1": ["work", {"type": "var", "name": "src_name"}]
+ }
+ ]
+ , [ "inputs"
+ , { "type": "map_union"
+ , "$1":
+ [ {"type": "var", "name": "all hdrs"}
+ , { "type": "singleton_map"
+ , "key": {"type": "var", "name": "work src_name"}
+ , "value": {"type": "var", "name": "src_val"}
+ }
+ ]
+ }
+ ]
+ , [ "out"
+ , { "type": "change_ending"
+ , "$1": {"type": "var", "name": "src_name"}
+ , "ending": ".o"
+ }
+ ]
+ , [ "work out"
+ , { "type": "join"
+ , "separator": "/"
+ , "$1": ["work", {"type": "var", "name": "out"}]
+ }
+ ]
+ , [ "action output"
+ , { "type": "ACTION"
+ , "outs": [{"type": "var", "name": "work out"}]
+ , "inputs": {"type": "var", "name": "inputs"}
+ , "cmd":
+ { "type": "++"
+ , "$1":
+ [ [{"type": "var", "name": "CXX"}]
+ , {"type": "var", "name": "CXXFLAGS"}
+ , ["-I", "work", "-isystem", "include"]
+ , ["-c", {"type": "var", "name": "work src_name"}]
+ , ["-o", {"type": "var", "name": "work out"}]
+ ]
+ }
+ }
+ ]
+ , [ "staged output artifact"
+ , { "type": "map_union"
+ , "$1":
+ { "type": "foreach_map"
+ , "range": {"type": "var", "name": "action output"}
+ , "var_val": "object"
+ , "body":
+ { "type": "singleton_map"
+ , "key": {"type": "var", "name": "out"}
+ , "value": {"type": "var", "name": "object"}
+ }
+ }
+ }
+ ]
+ ]
+ , "body": {"type": "var", "name": "staged output artifact"}
+ }
+ }
+ }
+ }
+ }
+, "lib result":
+ { "vars":
+ [ "CXX"
+ , "CXXFLAGS"
+ , "CC"
+ , "CFLAGS"
+ , "ENV"
+ , "AR"
+ , "srcs"
+ , "hdrs"
+ , "private-hdrs"
+ , "link external"
+ , "extra-provides"
+ ]
+ , "imports":
+ { "artifacts": ["./", "..", "field_artifacts"]
+ , "compile-deps": "compile-deps"
+ , "link-deps": "link-deps"
+ , "objects": "objects"
+ , "default-CC": "default-CC"
+ , "default-CXX": "default-CXX"
+ , "default-CFLAGS": "default-CFLAGS"
+ , "default-CXXFLAGS": "default-CXXFLAGS"
+ , "default-ENV": "default-ENV"
+ }
+ , "expression":
+ { "type": "let*"
+ , "bindings":
+ [ [ "stage"
+ , { "type": "join"
+ , "separator": "/"
+ , "$1": {"type": "FIELD", "name": "stage"}
+ }
+ ]
+ , [ "CXX"
+ , { "type": "if"
+ , "cond": {"type": "FIELD", "name": "pure C"}
+ , "then":
+ { "type": "var"
+ , "name": "CC"
+ , "default": {"type": "CALL_EXPRESSION", "name": "default-CC"}
+ }
+ , "else":
+ { "type": "var"
+ , "name": "CXX"
+ , "default": {"type": "CALL_EXPRESSION", "name": "default-CXX"}
+ }
+ }
+ ]
+ , [ "CXXFLAGS"
+ , { "type": "if"
+ , "cond": {"type": "FIELD", "name": "pure C"}
+ , "then":
+ { "type": "var"
+ , "name": "CFLAGS"
+ , "default": {"type": "CALL_EXPRESSION", "name": "default-CFLAGS"}
+ }
+ , "else":
+ { "type": "var"
+ , "name": "CXXFLAGS"
+ , "default":
+ {"type": "CALL_EXPRESSION", "name": "default-CXXFLAGS"}
+ }
+ }
+ ]
+ , [ "ENV"
+ , { "type": "map_union"
+ , "$1":
+ { "type": "++"
+ , "$1":
+ [ {"type": "CALL_EXPRESSION", "name": "default-ENV"}
+ , [ { "type": "var"
+ , "name": "ENV"
+ , "default": {"type": "empty_map"}
+ }
+ ]
+ ]
+ }
+ }
+ ]
+ , ["compile-deps", {"type": "CALL_EXPRESSION", "name": "compile-deps"}]
+ , ["link-deps", {"type": "CALL_EXPRESSION", "name": "link-deps"}]
+ , [ "local hdrs"
+ , { "type": "disjoint_map_union"
+ , "$1":
+ [ {"type": "var", "name": "hdrs"}
+ , {"type": "var", "name": "private-hdrs"}
+ ]
+ }
+ ]
+ , ["objects", {"type": "CALL_EXPRESSION", "name": "objects"}]
+ , [ "base name"
+ , {"type": "join", "$1": {"type": "FIELD", "name": "name"}}
+ ]
+ , [ "libname"
+ , { "type": "join"
+ , "$1": ["lib", {"type": "var", "name": "base name"}, ".a"]
+ }
+ ]
+ , [ "lib"
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "objects"}
+ , "else": {"type": "empty_map"}
+ , "then":
+ { "type": "ACTION"
+ , "outs": [{"type": "var", "name": "libname"}]
+ , "inputs": {"type": "var", "name": "objects"}
+ , "cmd":
+ { "type": "++"
+ , "$1":
+ [ [ {"type": "var", "name": "AR", "default": "ar"}
+ , "cqs"
+ , {"type": "var", "name": "libname"}
+ ]
+ , {"type": "keys", "$1": {"type": "var", "name": "objects"}}
+ ]
+ }
+ }
+ }
+ ]
+ , [ "lib"
+ , { "type": "to_subdir"
+ , "subdir": {"type": "var", "name": "stage"}
+ , "$1": {"type": "var", "name": "lib"}
+ }
+ ]
+ , [ "link-args"
+ , { "type": "nub_right"
+ , "$1":
+ { "type": "++"
+ , "$1":
+ [ {"type": "keys", "$1": {"type": "var", "name": "lib"}}
+ , { "type": "++"
+ , "$1":
+ { "type": "foreach"
+ , "var": "dep"
+ , "range": {"type": "FIELD", "name": "deps"}
+ , "body":
+ { "type": "DEP_PROVIDES"
+ , "dep": {"type": "var", "name": "dep"}
+ , "provider": "link-args"
+ }
+ }
+ }
+ , { "type": "++"
+ , "$1":
+ { "type": "foreach"
+ , "var": "dep"
+ , "range": {"type": "FIELD", "name": "proto-deps"}
+ , "body":
+ { "type": "DEP_PROVIDES"
+ , "dep": {"type": "var", "name": "dep"}
+ , "provider": "link-args"
+ }
+ }
+ }
+ , {"type": "var", "name": "link external", "default": []}
+ ]
+ }
+ }
+ ]
+ ]
+ , "body":
+ { "type": "RESULT"
+ , "artifacts": {"type": "var", "name": "lib"}
+ , "runfiles": {"type": "var", "name": "hdrs"}
+ , "provides":
+ { "type": "map_union"
+ , "$1":
+ [ { "type": "singleton_map"
+ , "key": "compile-deps"
+ , "value": {"type": "var", "name": "compile-deps"}
+ }
+ , { "type": "singleton_map"
+ , "key": "link-deps"
+ , "value": {"type": "var", "name": "link-deps"}
+ }
+ , { "type": "singleton_map"
+ , "key": "link-args"
+ , "value": {"type": "var", "name": "link-args"}
+ }
+ , { "type": "var"
+ , "name": "extra-provides"
+ , "default": {"type": "empty_map"}
+ }
+ ]
+ }
+ }
+ }
+ }
+}
diff --git a/rules/CC/RULES b/rules/CC/RULES
new file mode 100644
index 00000000..93d5afc4
--- /dev/null
+++ b/rules/CC/RULES
@@ -0,0 +1,538 @@
+{ "defaults":
+ { "doc":
+ [ "A rule to provide defaults."
+ , "All CC targets take their defaults for CXX, CC, flags, etc from"
+ , "the target [\"CC\", \"defaults\"]. This is probably the only sensibe"
+ , "use of this rule. As targets form a different root, the defaults"
+ , "can be provided without changing this directory."
+ ]
+ , "string_fields": ["CC", "CXX", "CFLAGS", "CXXFLAGS", "PATH", "AR"]
+ , "expression":
+ { "type": "RESULT"
+ , "provides":
+ { "type": "map_union"
+ , "$1":
+ [ { "type": "singleton_map"
+ , "key": "CC"
+ , "value": {"type": "FIELD", "name": "CC"}
+ }
+ , { "type": "singleton_map"
+ , "key": "CXX"
+ , "value": {"type": "FIELD", "name": "CXX"}
+ }
+ , { "type": "singleton_map"
+ , "key": "CFLAGS"
+ , "value": {"type": "FIELD", "name": "CFLAGS"}
+ }
+ , { "type": "singleton_map"
+ , "key": "CXXFLAGS"
+ , "value": {"type": "FIELD", "name": "CXXFLAGS"}
+ }
+ , { "type": "singleton_map"
+ , "key": "AR"
+ , "value": {"type": "FIELD", "name": "AR"}
+ }
+ , { "type": "singleton_map"
+ , "key": "ENV"
+ , "value":
+ { "type": "singleton_map"
+ , "key": "PATH"
+ , "value":
+ { "type": "join"
+ , "separator": ":"
+ , "$1": {"type": "FIELD", "name": "PATH"}
+ }
+ }
+ }
+ ]
+ }
+ }
+ }
+, "configure":
+ { "doc":
+ [ "A rule to provide a static platform configuration for a target."
+ , "The artifacts and runfiles of the specified target will be propagated."
+ , "The target defined by this rule does not propagate any provides data."
+ ]
+ , "config_fields": ["os", "arch", "target_arch"]
+ , "target_fields": ["target"]
+ , "field_doc":
+ { "os": ["The operation system used for building."]
+ , "arch": ["The architecture used for building."]
+ , "target_arch":
+ [ "Non-mandatory target architecture to build for. If omitted, target"
+ , "architecture is derived from \"arch\"."
+ ]
+ , "target":
+ [ "The target to configure. Multiple targets are supported, but their"
+ , "artifacts and runfiles should not conflict."
+ ]
+ }
+ , "imports":
+ { "transition": "configure transition"
+ , "artifacts": ["./", "..", "field_artifacts"]
+ , "runfiles": ["./", "..", "field_runfiles"]
+ }
+ , "config_transitions":
+ {"target": [{"type": "CALL_EXPRESSION", "name": "transition"}]}
+ , "expression":
+ { "type": "let*"
+ , "bindings":
+ [ ["fieldname", "target"]
+ , ["transition", {"type": "CALL_EXPRESSION", "name": "transition"}]
+ ]
+ , "body":
+ { "type": "RESULT"
+ , "artifacts": {"type": "CALL_EXPRESSION", "name": "artifacts"}
+ , "runfiles": {"type": "CALL_EXPRESSION", "name": "runfiles"}
+ }
+ }
+ }
+, "header directory":
+ { "doc":
+ [ "A directory of header files."
+ , ""
+ , "Define a directory of header files that belong together and are staged"
+ , "in such a way that no other target (used together with this target) will"
+ , "have to put files in this directory. The typical use case is a library"
+ , "libfoo that expects all headers to be included as #include \"foo/bar.h\"."
+ , "In this case, one would define a header direcotry for \"foo\"."
+ , ""
+ , "Technically, a tree is created from the given files and staged to the"
+ , "specified location. Since trees are opaque, the directory name becomes"
+ , "essentially owned by target. In this way, staging conflicts can be"
+ , "avoided by detecting them early and not only once a file with the same"
+ , "name is added to the staging location. Also, as only a tree identifier"
+ , "has to be passed around, such a directory can be handled more"
+ , "efficiently by the tool."
+ ]
+ , "target_fields": ["hdrs"]
+ , "string_fields": ["stage", "public stage"]
+ , "field_doc":
+ { "hdrs": ["The header files to be put into the header directory."]
+ , "stage":
+ [ "The location of the header directory."
+ , "Path segments are joined with \"/\"."
+ ]
+ , "public stage":
+ [ "If non-empty, no closure for the header directory's stage is created, "
+ , "so can be combined with other header directories having the same "
+ , "public staging directory."
+ ]
+ }
+ , "expression":
+ { "type": "let*"
+ , "bindings":
+ [ [ "hdrs"
+ , { "type": "disjoint_map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "x"
+ , "range": {"type": "FIELD", "name": "hdrs"}
+ , "body":
+ {"type": "DEP_RUNFILES", "dep": {"type": "var", "name": "x"}}
+ }
+ }
+ ]
+ , [ "stage"
+ , { "type": "join"
+ , "separator": "/"
+ , "$1": {"type": "FIELD", "name": "stage"}
+ }
+ ]
+ , [ "dir"
+ , { "type": "if"
+ , "cond": {"type": "FIELD", "name": "public stage"}
+ , "then":
+ { "type": "to_subdir"
+ , "subdir": {"type": "var", "name": "stage"}
+ , "$1": {"type": "var", "name": "hdrs"}
+ }
+ , "else":
+ { "type": "singleton_map"
+ , "key": {"type": "var", "name": "stage"}
+ , "value": {"type": "TREE", "$1": {"type": "var", "name": "hdrs"}}
+ }
+ }
+ ]
+ ]
+ , "body":
+ { "type": "RESULT"
+ , "artifacts": {"type": "var", "name": "dir"}
+ , "runfiles": {"type": "var", "name": "dir"}
+ }
+ }
+ }
+, "library":
+ { "doc": ["A C++ libaray"]
+ , "target_fields": ["srcs", "hdrs", "private-hdrs", "deps", "proto"]
+ , "string_fields":
+ ["name", "stage", "pure C", "local defines", "link external"]
+ , "config_vars": ["CXX", "CC", "CXXFLAGS", "CFLAGS", "ENV", "AR"]
+ , "implicit": {"defaults": ["defaults"]}
+ , "field_doc":
+ { "name":
+ ["The name of the library (without leading \"lib\" or trailing \".a\""]
+ , "srcs": ["The source files of the library."]
+ , "hdrs": ["Any public header files of the library."]
+ , "private-hdrs":
+ [ "Any header files that only need to be present when compiling the"
+ , "source files, but are not needed for any consumer of the library"
+ ]
+ , "stage":
+ [ "The logical location of all header and source files, as well as the"
+ , "resulting library file. Individual directory components are joined"
+ , "with \"/\"."
+ ]
+ , "pure C":
+ [ "If non-empty, compile as C sources rathter than C++ sources."
+ , "In particular, CC is used to compile rather than CXX"
+ ]
+ , "local defines":
+ [ "List of defines set for source files local to this target."
+ , "Each list entry will be prepended by \"-D\"."
+ ]
+ , "link external":
+ ["Additional linker flags for linking external libraries."]
+ , "deps": ["Any other libraries this library depends upon."]
+ }
+ , "config_doc":
+ { "CXX": ["The name of the C++ compiler to be used."]
+ , "CC":
+ ["The name of the C compiler to be used (when compiling pure C code)"]
+ , "AR": ["The archive tool to used for creating the library"]
+ , "ENV": ["The environment for any action generated."]
+ , "CXXFLAGS":
+ [ "The flags for CXX to be used instead of the default ones."
+ , "For libraries that should be built in a non-standard way; usually"
+ , "adapting the default target [\"CC\", \"defaults\"] is the better"
+ , "choice"
+ ]
+ , "CFLAGS":
+ [ "The flags for CC to be used instead of the default ones."
+ , "For libraries that should be built in a non-standard way; usually"
+ , "adapting the default target [\"CC\", \"defaults\"] is the better"
+ , "choice"
+ ]
+ }
+ , "anonymous":
+ { "proto-deps":
+ { "target": "proto"
+ , "provider": "proto"
+ , "rule_map":
+ { "library": ["./", "proto", "library"]
+ , "service library": ["./", "proto", "service library"]
+ }
+ }
+ }
+ , "imports":
+ { "artifacts": ["./", "..", "field_artifacts"]
+ , "default-CXXFLAGS": "default-CXXFLAGS"
+ , "default-CFLAGS": "default-CFLAGS"
+ , "result": "lib result"
+ }
+ , "expression":
+ { "type": "let*"
+ , "bindings":
+ [ [ "local defines"
+ , { "type": "foreach"
+ , "var": "def"
+ , "range": {"type": "FIELD", "name": "local defines"}
+ , "body":
+ {"type": "join", "$1": ["-D", {"type": "var", "name": "def"}]}
+ }
+ ]
+ , [ "CFLAGS"
+ , { "type": "++"
+ , "$1":
+ [ { "type": "var"
+ , "name": "CFLAGS"
+ , "default":
+ {"type": "CALL_EXPRESSION", "name": "default-CFLAGS"}
+ }
+ , {"type": "var", "name": "local defines"}
+ ]
+ }
+ ]
+ , [ "CXXFLAGS"
+ , { "type": "++"
+ , "$1":
+ [ { "type": "var"
+ , "name": "CXXFLAGS"
+ , "default":
+ {"type": "CALL_EXPRESSION", "name": "default-CXXFLAGS"}
+ }
+ , {"type": "var", "name": "local defines"}
+ ]
+ }
+ ]
+ , [ "stage"
+ , { "type": "join"
+ , "separator": "/"
+ , "$1": {"type": "FIELD", "name": "stage"}
+ }
+ ]
+ , [ "srcs"
+ , { "type": "to_subdir"
+ , "subdir": {"type": "var", "name": "stage"}
+ , "$1":
+ { "type": "let*"
+ , "bindings": [["fieldname", "srcs"]]
+ , "body": {"type": "CALL_EXPRESSION", "name": "artifacts"}
+ }
+ }
+ ]
+ , [ "hdrs"
+ , { "type": "to_subdir"
+ , "subdir": {"type": "var", "name": "stage"}
+ , "$1":
+ { "type": "let*"
+ , "bindings": [["fieldname", "hdrs"]]
+ , "body": {"type": "CALL_EXPRESSION", "name": "artifacts"}
+ }
+ }
+ ]
+ , [ "private-hdrs"
+ , { "type": "to_subdir"
+ , "subdir": {"type": "var", "name": "stage"}
+ , "$1":
+ { "type": "let*"
+ , "bindings": [["fieldname", "private-hdrs"]]
+ , "body": {"type": "CALL_EXPRESSION", "name": "artifacts"}
+ }
+ }
+ ]
+ , ["link external", {"type": "FIELD", "name": "link external"}]
+ ]
+ , "body": {"type": "CALL_EXPRESSION", "name": "result"}
+ }
+ }
+, "binary":
+ { "doc": ["A binary written in C++"]
+ , "target_fields": ["srcs", "private-hdrs", "deps", "proto"]
+ , "string_fields":
+ ["name", "stage", "pure C", "local defines", "link external"]
+ , "config_vars": ["CXX", "CC", "CXXFLAGS", "CFLAGS", "ENV"]
+ , "implicit": {"defaults": ["defaults"]}
+ , "field_doc":
+ { "name": ["The name of the binary"]
+ , "srcs": ["The source files of the library."]
+ , "private-hdrs":
+ [ "Any header files that need to be present when compiling the"
+ , "source files."
+ ]
+ , "stage":
+ [ "The logical location of all header and source files, as well as the"
+ , "resulting binary file. Individual directory components are joined"
+ , "with \"/\"."
+ ]
+ , "pure C":
+ [ "If non-empty, compile as C sources rathter than C++ sources."
+ , "In particular, CC is used to compile rather than CXX"
+ ]
+ , "local defines":
+ [ "List of defines set for source files local to this target."
+ , "Each list entry will be prepended by \"-D\"."
+ ]
+ , "link external":
+ ["Additional linker flags for linking external libraries."]
+ , "deps": ["Any other libraries this binary depends upon."]
+ }
+ , "config_doc":
+ { "CXX": ["The name of the C++ compiler to be used."]
+ , "CC":
+ ["The name of the C compiler to be used (when compiling pure C code)"]
+ , "ENV": ["The environment for any action generated."]
+ , "CXXFLAGS":
+ [ "The flags for CXX to be used instead of the default ones"
+ , "taken from the [\"CC\", \"defaults\"] target"
+ ]
+ , "CFLAGS":
+ [ "The flags for CXX to be used instead of the default ones"
+ , "taken from the [\"CC\", \"defaults\"] target"
+ ]
+ }
+ , "anonymous":
+ { "proto-deps":
+ { "target": "proto"
+ , "provider": "proto"
+ , "rule_map":
+ { "library": ["./", "proto", "library"]
+ , "service library": ["./", "proto", "service library"]
+ }
+ }
+ }
+ , "imports":
+ { "artifacts": ["./", "..", "field_artifacts"]
+ , "compile-deps": "compile-deps"
+ , "link-deps": "link-deps"
+ , "objects": "objects"
+ , "default-CC": "default-CC"
+ , "default-CXX": "default-CXX"
+ , "default-CFLAGS": "default-CFLAGS"
+ , "default-CXXFLAGS": "default-CXXFLAGS"
+ , "default-ENV": "default-ENV"
+ }
+ , "expression":
+ { "type": "let*"
+ , "bindings":
+ [ [ "local defines"
+ , { "type": "foreach"
+ , "var": "def"
+ , "range": {"type": "FIELD", "name": "local defines"}
+ , "body":
+ {"type": "join", "$1": ["-D", {"type": "var", "name": "def"}]}
+ }
+ ]
+ , [ "stage"
+ , { "type": "join"
+ , "separator": "/"
+ , "$1": {"type": "FIELD", "name": "stage"}
+ }
+ ]
+ , [ "srcs"
+ , { "type": "to_subdir"
+ , "subdir": {"type": "var", "name": "stage"}
+ , "$1":
+ { "type": "let*"
+ , "bindings": [["fieldname", "srcs"]]
+ , "body": {"type": "CALL_EXPRESSION", "name": "artifacts"}
+ }
+ }
+ ]
+ , [ "local hdrs"
+ , { "type": "to_subdir"
+ , "subdir": {"type": "var", "name": "stage"}
+ , "$1":
+ { "type": "let*"
+ , "bindings": [["fieldname", "private-hdrs"]]
+ , "body": {"type": "CALL_EXPRESSION", "name": "artifacts"}
+ }
+ }
+ ]
+ , [ "CXX"
+ , { "type": "if"
+ , "cond": {"type": "FIELD", "name": "pure C"}
+ , "then":
+ { "type": "var"
+ , "name": "CC"
+ , "default": {"type": "CALL_EXPRESSION", "name": "default-CC"}
+ }
+ , "else":
+ { "type": "var"
+ , "name": "CXX"
+ , "default": {"type": "CALL_EXPRESSION", "name": "default-CXX"}
+ }
+ }
+ ]
+ , [ "CXXFLAGS"
+ , { "type": "if"
+ , "cond": {"type": "FIELD", "name": "pure C"}
+ , "then":
+ { "type": "var"
+ , "name": "CFLAGS"
+ , "default": {"type": "CALL_EXPRESSION", "name": "default-CFLAGS"}
+ }
+ , "else":
+ { "type": "var"
+ , "name": "CXXFLAGS"
+ , "default":
+ {"type": "CALL_EXPRESSION", "name": "default-CXXFLAGS"}
+ }
+ }
+ ]
+ , [ "CXXFLAGS"
+ , { "type": "++"
+ , "$1":
+ [ {"type": "var", "name": "CXXFLAGS"}
+ , {"type": "var", "name": "local defines"}
+ ]
+ }
+ ]
+ , [ "ENV"
+ , { "type": "map_union"
+ , "$1":
+ { "type": "++"
+ , "$1":
+ [ {"type": "CALL_EXPRESSION", "name": "default-ENV"}
+ , [ { "type": "var"
+ , "name": "ENV"
+ , "default": {"type": "empty_map"}
+ }
+ ]
+ ]
+ }
+ }
+ ]
+ , ["compile-deps", {"type": "CALL_EXPRESSION", "name": "compile-deps"}]
+ , ["link-deps", {"type": "CALL_EXPRESSION", "name": "link-deps"}]
+ , ["objects", {"type": "CALL_EXPRESSION", "name": "objects"}]
+ , [ "base name"
+ , {"type": "join", "$1": {"type": "FIELD", "name": "name"}}
+ ]
+ , [ "binary name"
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "stage"}
+ , "else": {"type": "var", "name": "base name"}
+ , "then":
+ { "type": "join"
+ , "separator": "/"
+ , "$1":
+ [ {"type": "var", "name": "stage"}
+ , {"type": "var", "name": "base name"}
+ ]
+ }
+ }
+ ]
+ , [ "link-args"
+ , { "type": "nub_right"
+ , "$1":
+ { "type": "++"
+ , "$1":
+ [ {"type": "keys", "$1": {"type": "var", "name": "objects"}}
+ , { "type": "++"
+ , "$1":
+ { "type": "foreach"
+ , "var": "dep"
+ , "range": {"type": "FIELD", "name": "deps"}
+ , "body":
+ { "type": "DEP_PROVIDES"
+ , "dep": {"type": "var", "name": "dep"}
+ , "provider": "link-args"
+ }
+ }
+ }
+ , {"type": "FIELD", "name": "link external"}
+ ]
+ }
+ }
+ ]
+ , [ "binary"
+ , { "type": "ACTION"
+ , "outs": [{"type": "var", "name": "binary name"}]
+ , "inputs":
+ { "type": "disjoint_map_union"
+ , "$1":
+ [ {"type": "var", "name": "objects"}
+ , {"type": "var", "name": "link-deps"}
+ ]
+ }
+ , "cmd":
+ { "type": "++"
+ , "$1":
+ [ [ {"type": "var", "name": "CXX"}
+ , "-o"
+ , {"type": "var", "name": "binary name"}
+ ]
+ , {"type": "var", "name": "link-args"}
+ ]
+ }
+ , "env": {"type": "var", "name": "ENV"}
+ }
+ ]
+ ]
+ , "body":
+ {"type": "RESULT", "artifacts": {"type": "var", "name": "binary"}}
+ }
+ }
+}
diff --git a/rules/CC/proto/EXPRESSIONS b/rules/CC/proto/EXPRESSIONS
new file mode 100644
index 00000000..a98c51ca
--- /dev/null
+++ b/rules/CC/proto/EXPRESSIONS
@@ -0,0 +1,301 @@
+{ "protoc-deps":
+ { "expression":
+ { "type": "disjoint_map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "dep"
+ , "range": {"type": "FIELD", "name": "deps"}
+ , "body":
+ { "type": "DEP_PROVIDES"
+ , "dep": {"type": "var", "name": "dep"}
+ , "provider": "protoc-deps"
+ , "default": {"type": "empty_map"}
+ }
+ }
+ }
+ }
+, "protoc-compile":
+ { "vars": ["transition", "service support"]
+ , "imports":
+ { "stage": ["", "stage_singleton_field"]
+ , "result": ["./", "..", "lib result"]
+ , "field_runfiles": ["", "field_runfiles"]
+ , "protoc-deps": "protoc-deps"
+ }
+ , "expression":
+ { "type": "let*"
+ , "bindings":
+ [ [ "protoc"
+ , { "type": "let*"
+ , "bindings": [["fieldname", "protoc"], ["location", "protoc"]]
+ , "body": {"type": "CALL_EXPRESSION", "name": "stage"}
+ }
+ ]
+ , [ "grpc_cpp_plugin"
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "service support"}
+ , "then":
+ { "type": "let*"
+ , "bindings":
+ [ ["fieldname", "grpc_cpp_plugin"]
+ , ["location", "grpc_cpp_plugin"]
+ ]
+ , "body": {"type": "CALL_EXPRESSION", "name": "stage"}
+ }
+ , "else": {"type": "empty_map"}
+ }
+ ]
+ , ["protoc-deps", {"type": "CALL_EXPRESSION", "name": "protoc-deps"}]
+ , [ "proto deps"
+ , { "type": "to_subdir"
+ , "subdir": "work"
+ , "$1":
+ { "type": "disjoint_map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "x"
+ , "range": {"type": "FIELD", "name": "well_known_protos"}
+ , "body":
+ {"type": "DEP_ARTIFACTS", "dep": {"type": "var", "name": "x"}}
+ }
+ }
+ }
+ ]
+ , [ "proto srcs"
+ , { "type": "disjoint_map_union"
+ , "msg": "Sources may not conflict"
+ , "$1":
+ { "type": "foreach"
+ , "var": "x"
+ , "range": {"type": "FIELD", "name": "srcs"}
+ , "body":
+ {"type": "DEP_ARTIFACTS", "dep": {"type": "var", "name": "x"}}
+ }
+ }
+ ]
+ , [ "all proto srcs"
+ , { "type": "disjoint_map_union"
+ , "msg": "Conflict with proto files of dependencies"
+ , "$1":
+ [ {"type": "var", "name": "protoc-deps"}
+ , {"type": "var", "name": "proto srcs"}
+ ]
+ }
+ ]
+ , [ "staged srcs"
+ , { "type": "to_subdir"
+ , "subdir": "work"
+ , "$1": {"type": "var", "name": "proto srcs"}
+ }
+ ]
+ , [ "staged all proto srcs"
+ , { "type": "to_subdir"
+ , "subdir": "work"
+ , "$1": {"type": "var", "name": "all proto srcs"}
+ }
+ ]
+ , [ "outs"
+ , { "type": "++"
+ , "$1":
+ { "type": "foreach"
+ , "var": "f"
+ , "range":
+ {"type": "keys", "$1": {"type": "var", "name": "staged srcs"}}
+ , "body":
+ { "type": "++"
+ , "$1":
+ [ [ { "type": "change_ending"
+ , "$1": {"type": "var", "name": "f"}
+ , "ending": ".pb.h"
+ }
+ , { "type": "change_ending"
+ , "$1": {"type": "var", "name": "f"}
+ , "ending": ".pb.cc"
+ }
+ ]
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "service support"}
+ , "then":
+ [ { "type": "change_ending"
+ , "$1": {"type": "var", "name": "f"}
+ , "ending": ".grpc.pb.h"
+ }
+ , { "type": "change_ending"
+ , "$1": {"type": "var", "name": "f"}
+ , "ending": ".grpc.pb.cc"
+ }
+ ]
+ , "else": []
+ }
+ ]
+ }
+ }
+ }
+ ]
+ , [ "cmd"
+ , { "type": "++"
+ , "$1":
+ [ ["./protoc", "--proto_path=work", "--cpp_out=work"]
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "service support"}
+ , "then":
+ [ "--grpc_out=work"
+ , "--plugin=protoc-gen-grpc=./grpc_cpp_plugin"
+ ]
+ , "else": []
+ }
+ , {"type": "keys", "$1": {"type": "var", "name": "staged srcs"}}
+ ]
+ }
+ ]
+ , [ "generated"
+ , { "type": "ACTION"
+ , "inputs":
+ { "type": "map_union"
+ , "$1":
+ [ {"type": "var", "name": "staged all proto srcs"}
+ , {"type": "var", "name": "protoc"}
+ , {"type": "var", "name": "grpc_cpp_plugin"}
+ , {"type": "var", "name": "proto deps"}
+ ]
+ }
+ , "outs": {"type": "var", "name": "outs"}
+ , "cmd": {"type": "var", "name": "cmd"}
+ }
+ ]
+ , [ "srcs"
+ , { "type": "map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "name"
+ , "range":
+ {"type": "keys", "$1": {"type": "var", "name": "proto srcs"}}
+ , "body":
+ { "type": "map_union"
+ , "$1":
+ [ { "type": "singleton_map"
+ , "key":
+ { "type": "change_ending"
+ , "$1": {"type": "var", "name": "name"}
+ , "ending": ".pb.cc"
+ }
+ , "value":
+ { "type": "lookup"
+ , "map": {"type": "var", "name": "generated"}
+ , "key":
+ { "type": "join"
+ , "$1":
+ [ "work/"
+ , { "type": "change_ending"
+ , "$1": {"type": "var", "name": "name"}
+ , "ending": ".pb.cc"
+ }
+ ]
+ }
+ }
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "service support"}
+ , "then":
+ { "type": "singleton_map"
+ , "key":
+ { "type": "change_ending"
+ , "$1": {"type": "var", "name": "name"}
+ , "ending": ".grpc.pb.cc"
+ }
+ , "value":
+ { "type": "lookup"
+ , "map": {"type": "var", "name": "generated"}
+ , "key":
+ { "type": "join"
+ , "$1":
+ [ "work/"
+ , { "type": "change_ending"
+ , "$1": {"type": "var", "name": "name"}
+ , "ending": ".grpc.pb.cc"
+ }
+ ]
+ }
+ }
+ }
+ , "else": {"type": "empty_map"}
+ }
+ ]
+ }
+ }
+ }
+ ]
+ , [ "hdrs"
+ , { "type": "map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "name"
+ , "range":
+ {"type": "keys", "$1": {"type": "var", "name": "proto srcs"}}
+ , "body":
+ { "type": "map_union"
+ , "$1":
+ [ { "type": "singleton_map"
+ , "key":
+ { "type": "change_ending"
+ , "$1": {"type": "var", "name": "name"}
+ , "ending": ".pb.h"
+ }
+ , "value":
+ { "type": "lookup"
+ , "map": {"type": "var", "name": "generated"}
+ , "key":
+ { "type": "join"
+ , "$1":
+ [ "work/"
+ , { "type": "change_ending"
+ , "$1": {"type": "var", "name": "name"}
+ , "ending": ".pb.h"
+ }
+ ]
+ }
+ }
+ }
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "service support"}
+ , "then":
+ { "type": "singleton_map"
+ , "key":
+ { "type": "change_ending"
+ , "$1": {"type": "var", "name": "name"}
+ , "ending": ".grpc.pb.h"
+ }
+ , "value":
+ { "type": "lookup"
+ , "map": {"type": "var", "name": "generated"}
+ , "key":
+ { "type": "join"
+ , "$1":
+ [ "work/"
+ , { "type": "change_ending"
+ , "$1": {"type": "var", "name": "name"}
+ , "ending": ".grpc.pb.h"
+ }
+ ]
+ }
+ }
+ }
+ , "else": {"type": "empty_map"}
+ }
+ ]
+ }
+ }
+ }
+ ]
+ , ["private-hdrs", {"type": "empty_map"}]
+ , [ "extra-provides"
+ , { "type": "singleton_map"
+ , "key": "protoc-deps"
+ , "value": {"type": "var", "name": "all proto srcs"}
+ }
+ ]
+ ]
+ , "body": {"type": "CALL_EXPRESSION", "name": "result"}
+ }
+ }
+}
diff --git a/rules/CC/proto/RULES b/rules/CC/proto/RULES
new file mode 100644
index 00000000..04082c1b
--- /dev/null
+++ b/rules/CC/proto/RULES
@@ -0,0 +1,72 @@
+{ "library":
+ { "doc":
+ [ "A library C++ library, generated from proto files."
+ , ""
+ , "This rule usually is used to bind anonymous targets generated from"
+ , "proto libraries."
+ ]
+ , "string_fields": ["name", "stage"]
+ , "target_fields": ["srcs", "deps"]
+ , "config_vars":
+ ["OS", "ARCH", "HOST_ARCH", "CXX", "CC", "CXXFLAGS", "CFLAGS", "ENV", "AR"]
+ , "implicit":
+ { "protoc": [["@", "protoc", "", "protoc"]]
+ , "defaults": [["./", "..", "defaults"]]
+ , "proto-deps": [["@", "protoc", "", "C++ runtime"]]
+ , "well_known_protos": [["@", "protoc", "", "well_known_protos"]]
+ , "pure C": []
+ }
+ , "imports":
+ { "protoc-compile": "protoc-compile"
+ , "host transition": ["transitions", "for host"]
+ }
+ , "config_transitions":
+ {"protoc": [{"type": "CALL_EXPRESSION", "name": "host transition"}]}
+ , "expression":
+ { "type": "let*"
+ , "bindings":
+ [["transition", {"type": "CALL_EXPRESSION", "name": "host transition"}]]
+ , "body": {"type": "CALL_EXPRESSION", "name": "protoc-compile"}
+ }
+ }
+, "service library":
+ { "doc":
+ [ "A service library C++ library, generated from proto files."
+ , ""
+ , "Calls protoc with gRPC plugin to additionally generate gRPC services"
+ , "from proto libraries."
+ ]
+ , "string_fields": ["name", "stage"]
+ , "target_fields": ["srcs", "deps"]
+ , "config_vars":
+ ["OS", "ARCH", "HOST_ARCH", "CXX", "CC", "CXXFLAGS", "CFLAGS", "ENV", "AR"]
+ , "implicit":
+ { "protoc": [["@", "protoc", "", "protoc"]]
+ , "grpc_cpp_plugin": [["@", "grpc", "src/compiler", "grpc_cpp_plugin"]]
+ , "defaults": [["./", "..", "defaults"]]
+ , "proto-deps":
+ [ ["@", "grpc", "", "grpc++_codegen_proto"]
+ , ["@", "protoc", "", "C++ runtime"]
+ ]
+ , "well_known_protos": [["@", "protoc", "", "well_known_protos"]]
+ , "pure C": []
+ }
+ , "imports":
+ { "protoc-compile": "protoc-compile"
+ , "host transition": ["transitions", "for host"]
+ }
+ , "config_transitions":
+ { "protoc": [{"type": "CALL_EXPRESSION", "name": "host transition"}]
+ , "grpc_cpp_plugin":
+ [{"type": "CALL_EXPRESSION", "name": "host transition"}]
+ }
+ , "expression":
+ { "type": "let*"
+ , "bindings":
+ [ ["service support", true]
+ , ["transition", {"type": "CALL_EXPRESSION", "name": "host transition"}]
+ ]
+ , "body": {"type": "CALL_EXPRESSION", "name": "protoc-compile"}
+ }
+ }
+}
diff --git a/rules/CC/test/RULES b/rules/CC/test/RULES
new file mode 100644
index 00000000..de5a4856
--- /dev/null
+++ b/rules/CC/test/RULES
@@ -0,0 +1,265 @@
+{ "test":
+ { "doc":
+ [ "A test written in C++"
+ , "FIXME: the test binary and data must be built for host"
+ ]
+ , "tainted": ["test"]
+ , "target_fields": ["srcs", "private-hdrs", "deps", "data"]
+ , "string_fields": ["name", "stage"]
+ , "config_vars": ["CXX", "CC", "CXXFLAGS", "CFLAGS", "ENV"]
+ , "implicit":
+ { "defaults": [["./", "..", "defaults"]]
+ , "proto-deps": []
+ , "runner": ["test_runner.sh"]
+ }
+ , "field_doc":
+ { "name":
+ [ "The name of the test"
+ , ""
+ , "Used to name the test binary as well as for staging the test result"
+ ]
+ , "srcs": ["The sources of the test binary"]
+ , "private-hdrs":
+ [ "Any additional header files that need to be present when compiling"
+ , "the test binary."
+ ]
+ , "stage":
+ [ "The logical location of all header and source files."
+ , "Individual directory components are joined with \"/\"."
+ ]
+ , "data": ["Any files the test binary needs access to when running"]
+ }
+ , "config_doc":
+ { "CXX": ["The name of the C++ compiler to be used."]
+ , "ENV": ["The environment for any action generated."]
+ , "CXXFLAGS":
+ [ "The flags for CXX to be used instead of the default ones"
+ , "taken from the [\"CC\", \"defaults\"] target"
+ ]
+ }
+ , "imports":
+ { "artifacts": ["./", "../..", "field_artifacts"]
+ , "compile-deps": ["./", "..", "compile-deps"]
+ , "link-deps": ["./", "..", "link-deps"]
+ , "objects": ["./", "..", "objects"]
+ , "default-CXX": ["./", "..", "default-CXX"]
+ , "default-CXXFLAGS": ["./", "..", "default-CXXFLAGS"]
+ , "default-ENV": ["./", "..", "default-ENV"]
+ }
+ , "expression":
+ { "type": "let*"
+ , "bindings":
+ [ [ "stage"
+ , { "type": "join"
+ , "separator": "/"
+ , "$1": {"type": "FIELD", "name": "stage"}
+ }
+ ]
+ , [ "srcs"
+ , { "type": "to_subdir"
+ , "subdir": {"type": "var", "name": "stage"}
+ , "$1":
+ { "type": "let*"
+ , "bindings": [["fieldname", "srcs"]]
+ , "body": {"type": "CALL_EXPRESSION", "name": "artifacts"}
+ }
+ }
+ ]
+ , [ "local hdrs"
+ , { "type": "to_subdir"
+ , "subdir": {"type": "var", "name": "stage"}
+ , "$1":
+ { "type": "let*"
+ , "bindings": [["fieldname", "private-hdrs"]]
+ , "body": {"type": "CALL_EXPRESSION", "name": "artifacts"}
+ }
+ }
+ ]
+ , [ "CXX"
+ , { "type": "var"
+ , "name": "CXX"
+ , "default": {"type": "CALL_EXPRESSION", "name": "default-CXX"}
+ }
+ ]
+ , [ "CXXFLAGS"
+ , { "type": "var"
+ , "name": "CXXFLAGS"
+ , "default": {"type": "CALL_EXPRESSION", "name": "default-CXXFLAGS"}
+ }
+ ]
+ , [ "ENV"
+ , { "type": "map_union"
+ , "$1":
+ { "type": "++"
+ , "$1":
+ [ {"type": "CALL_EXPRESSION", "name": "default-ENV"}
+ , [ { "type": "var"
+ , "name": "ENV"
+ , "default": {"type": "empty_map"}
+ }
+ ]
+ ]
+ }
+ }
+ ]
+ , ["compile-deps", {"type": "CALL_EXPRESSION", "name": "compile-deps"}]
+ , ["link-deps", {"type": "CALL_EXPRESSION", "name": "link-deps"}]
+ , ["objects", {"type": "CALL_EXPRESSION", "name": "objects"}]
+ , [ "base name"
+ , { "type": "assert_non_empty"
+ , "msg": "A non-empy name has to be provided"
+ , "$1": {"type": "join", "$1": {"type": "FIELD", "name": "name"}}
+ }
+ ]
+ , [ "binary name"
+ , { "type": "if"
+ , "cond": {"type": "var", "name": "stage"}
+ , "else": {"type": "var", "name": "base name"}
+ , "then":
+ { "type": "join"
+ , "separator": "/"
+ , "$1":
+ [ {"type": "var", "name": "stage"}
+ , {"type": "var", "name": "base name"}
+ ]
+ }
+ }
+ ]
+ , [ "link-args"
+ , { "type": "nub_right"
+ , "$1":
+ { "type": "++"
+ , "$1":
+ [ {"type": "keys", "$1": {"type": "var", "name": "objects"}}
+ , { "type": "++"
+ , "$1":
+ { "type": "foreach"
+ , "var": "dep"
+ , "range": {"type": "FIELD", "name": "deps"}
+ , "body":
+ { "type": "DEP_PROVIDES"
+ , "dep": {"type": "var", "name": "dep"}
+ , "provider": "link-args"
+ }
+ }
+ }
+ ]
+ }
+ }
+ ]
+ , [ "binary"
+ , { "type": "ACTION"
+ , "outs": [{"type": "var", "name": "binary name"}]
+ , "inputs":
+ { "type": "disjoint_map_union"
+ , "$1":
+ [ {"type": "var", "name": "objects"}
+ , {"type": "var", "name": "link-deps"}
+ ]
+ }
+ , "cmd":
+ { "type": "++"
+ , "$1":
+ [ [ {"type": "var", "name": "CXX"}
+ , "-o"
+ , {"type": "var", "name": "binary name"}
+ ]
+ , {"type": "var", "name": "link-args"}
+ ]
+ }
+ , "env": {"type": "var", "name": "ENV"}
+ }
+ ]
+ , [ "staged test binary"
+ , { "type": "map_union"
+ , "$1":
+ { "type": "foreach_map"
+ , "range": {"type": "var", "name": "binary"}
+ , "var_val": "binary"
+ , "body":
+ { "type": "singleton_map"
+ , "key": "test"
+ , "value": {"type": "var", "name": "binary"}
+ }
+ }
+ }
+ ]
+ , [ "runner"
+ , { "type": "map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "runner"
+ , "range": {"type": "FIELD", "name": "runner"}
+ , "body":
+ { "type": "map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "runner"
+ , "range":
+ { "type": "values"
+ , "$1":
+ { "type": "DEP_ARTIFACTS"
+ , "dep": {"type": "var", "name": "runner"}
+ }
+ }
+ , "body":
+ { "type": "singleton_map"
+ , "key": "runner.sh"
+ , "value": {"type": "var", "name": "runner"}
+ }
+ }
+ }
+ }
+ }
+ ]
+ , [ "data"
+ , { "type": "disjoint_map_union"
+ , "msg": "Data runfiles may not conflict"
+ , "$1":
+ { "type": "foreach"
+ , "var": "dep"
+ , "range": {"type": "FIELD", "name": "data"}
+ , "body":
+ {"type": "DEP_RUNFILES", "dep": {"type": "var", "name": "dep"}}
+ }
+ }
+ ]
+ , [ "test-results"
+ , { "type": "ACTION"
+ , "outs": ["result", "stdout", "stderr", "time-start", "time-stop"]
+ , "inputs":
+ { "type": "map_union"
+ , "$1":
+ [ { "type": "to_subdir"
+ , "subdir": "work"
+ , "$1": {"type": "var", "name": "data"}
+ }
+ , {"type": "var", "name": "runner"}
+ , {"type": "var", "name": "staged test binary"}
+ ]
+ }
+ , "cmd": ["sh", "./runner.sh"]
+ , "may_fail": ["test"]
+ , "fail_message":
+ { "type": "join"
+ , "$1":
+ ["CC test ", {"type": "var", "name": "binary name"}, " failed"]
+ }
+ }
+ ]
+ , [ "runfiles"
+ , { "type": "singleton_map"
+ , "key": {"type": "var", "name": "base name"}
+ , "value":
+ {"type": "TREE", "$1": {"type": "var", "name": "test-results"}}
+ }
+ ]
+ ]
+ , "body":
+ { "type": "RESULT"
+ , "artifacts": {"type": "var", "name": "test-results"}
+ , "runfiles": {"type": "var", "name": "runfiles"}
+ }
+ }
+ }
+}
diff --git a/rules/CC/test/test_runner.sh b/rules/CC/test/test_runner.sh
new file mode 100644
index 00000000..ed9f48a9
--- /dev/null
+++ b/rules/CC/test/test_runner.sh
@@ -0,0 +1,36 @@
+#!/bin/sh
+
+# ensure all required outputs are present
+touch stdout
+touch stderr
+RESULT=UNKNOWN
+echo "${RESULT}" > result
+echo UNKNOWN > time-start
+echo UNKNOWN > time-stop
+
+mkdir scratch
+export TEST_TMPDIR=$(realpath scratch)
+# Change to the working directory; note: the test might not
+# have test data, so we have to ensure the presence of the work
+# directory.
+
+mkdir -p work
+cd work
+
+date +%s > ../time-start
+# TODO:
+# - proper wrapping with timeout
+# - test arguments to select specific test cases
+if ../test > ../stdout 2> ../stderr
+then
+ RESULT=PASS
+else
+ RESULT=FAIL
+fi
+date +%s > ../time-stop
+echo "${RESULT}" > result
+
+if [ "${RESULT}" '!=' PASS ]
+then
+ exit 1;
+fi
diff --git a/rules/EXPRESSIONS b/rules/EXPRESSIONS
new file mode 100644
index 00000000..9b35b0ae
--- /dev/null
+++ b/rules/EXPRESSIONS
@@ -0,0 +1,89 @@
+{ "field_artifacts":
+ { "vars": ["fieldname", "transition"]
+ , "expression":
+ { "type": "map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "x"
+ , "range":
+ {"type": "FIELD", "name": {"type": "var", "name": "fieldname"}}
+ , "body":
+ { "type": "DEP_ARTIFACTS"
+ , "dep": {"type": "var", "name": "x"}
+ , "transition":
+ { "type": "var"
+ , "name": "transition"
+ , "default": {"type": "empty_map"}
+ }
+ }
+ }
+ }
+ }
+, "field_runfiles":
+ { "vars": ["fieldname", "transition"]
+ , "expression":
+ { "type": "map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "x"
+ , "range":
+ {"type": "FIELD", "name": {"type": "var", "name": "fieldname"}}
+ , "body":
+ { "type": "DEP_RUNFILES"
+ , "dep": {"type": "var", "name": "x"}
+ , "transition":
+ { "type": "var"
+ , "name": "transition"
+ , "default": {"type": "empty_map"}
+ }
+ }
+ }
+ }
+ }
+, "action_env":
+ { "vars": ["ENV"]
+ , "expression":
+ { "type": "map_union"
+ , "$1":
+ [ {"type": "singleton_map", "key": "PATH", "value": "/bin:/usr/bin"}
+ , {"type": "var", "name": "ENV", "default": {"type": "empty_map"}}
+ ]
+ }
+ }
+, "stage_singleton_field":
+ { "vars": ["fieldname", "transition", "location"]
+ , "expression":
+ { "type": "disjoint_map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "src"
+ , "range":
+ {"type": "FIELD", "name": {"type": "var", "name": "fieldname"}}
+ , "body":
+ { "type": "disjoint_map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "artifact"
+ , "range":
+ { "type": "values"
+ , "$1":
+ { "type": "DEP_ARTIFACTS"
+ , "dep": {"type": "var", "name": "src"}
+ , "transition":
+ { "type": "var"
+ , "name": "transition"
+ , "default": {"type": "empty_map"}
+ }
+ }
+ }
+ , "body":
+ { "type": "singleton_map"
+ , "key": {"type": "var", "name": "location"}
+ , "value": {"type": "var", "name": "artifact"}
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/rules/data/RULES b/rules/data/RULES
new file mode 100644
index 00000000..34bc6242
--- /dev/null
+++ b/rules/data/RULES
@@ -0,0 +1,42 @@
+{ "staged":
+ { "doc": ["Stage data to a logical subdirectory."]
+ , "target_fields": ["srcs"]
+ , "string_fields": ["stage"]
+ , "field_doc":
+ { "srcs": ["The (run)files to be staged"]
+ , "stage":
+ [ "The logical directory to stage the files to."
+ , "Individual directory components are joined with \"/\"."
+ ]
+ }
+ , "expression":
+ { "type": "let*"
+ , "bindings":
+ [ [ "stage"
+ , { "type": "join"
+ , "separator": "/"
+ , "$1": {"type": "FIELD", "name": "stage"}
+ }
+ ]
+ , [ "srcs"
+ , { "type": "disjoint_map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "x"
+ , "range": {"type": "FIELD", "name": "srcs"}
+ , "body":
+ {"type": "DEP_RUNFILES", "dep": {"type": "var", "name": "x"}}
+ }
+ }
+ ]
+ , [ "staged"
+ , { "type": "to_subdir"
+ , "subdir": {"type": "var", "name": "stage"}
+ , "$1": {"type": "var", "name": "srcs"}
+ }
+ ]
+ ]
+ , "body": {"type": "RESULT", "runfiles": {"type": "var", "name": "staged"}}
+ }
+ }
+}
diff --git a/rules/proto/RULES b/rules/proto/RULES
new file mode 100644
index 00000000..b6ce000d
--- /dev/null
+++ b/rules/proto/RULES
@@ -0,0 +1,105 @@
+{ "library":
+ { "target_fields": ["srcs", "deps"]
+ , "string_fields": ["stage", "name", "service"]
+ , "expression":
+ { "type": "let*"
+ , "bindings":
+ [ [ "stage"
+ , { "type": "join"
+ , "separator": "/"
+ , "$1": {"type": "FIELD", "name": "stage"}
+ }
+ ]
+ , [ "name"
+ , { "type": "assert_non_empty"
+ , "msg": "Have to provide a name, unique in the stage"
+ , "$1": {"type": "join", "$1": {"type": "FIELD", "name": "name"}}
+ }
+ ]
+ , [ "srcs"
+ , [ { "type": "VALUE_NODE"
+ , "$1":
+ { "type": "RESULT"
+ , "artifacts":
+ { "type": "to_subdir"
+ , "subdir": {"type": "var", "name": "stage"}
+ , "$1":
+ { "type": "disjoint_map_union"
+ , "msg": "Sources have to be conflict free"
+ , "$1":
+ { "type": "foreach"
+ , "var": "x"
+ , "range": {"type": "FIELD", "name": "srcs"}
+ , "body":
+ { "type": "DEP_ARTIFACTS"
+ , "dep": {"type": "var", "name": "x"}
+ }
+ }
+ }
+ }
+ }
+ }
+ ]
+ ]
+ , [ "deps"
+ , { "type": "++"
+ , "$1":
+ { "type": "foreach"
+ , "var": "x"
+ , "range": {"type": "FIELD", "name": "deps"}
+ , "body":
+ { "type": "DEP_PROVIDES"
+ , "dep": {"type": "var", "name": "x"}
+ , "provider": "proto"
+ }
+ }
+ }
+ ]
+ , [ "node"
+ , { "type": "ABSTRACT_NODE"
+ , "node_type":
+ { "type": "if"
+ , "cond": {"type": "FIELD", "name": "service"}
+ , "then": "service library"
+ , "else": "library"
+ }
+ , "target_fields":
+ { "type": "map_union"
+ , "$1":
+ [ { "type": "singleton_map"
+ , "key": "srcs"
+ , "value": {"type": "var", "name": "srcs"}
+ }
+ , { "type": "singleton_map"
+ , "key": "deps"
+ , "value": {"type": "var", "name": "deps"}
+ }
+ ]
+ }
+ , "string_fields":
+ { "type": "map_union"
+ , "$1":
+ [ { "type": "singleton_map"
+ , "key": "name"
+ , "value": [{"type": "var", "name": "name"}]
+ }
+ , { "type": "singleton_map"
+ , "key": "stage"
+ , "value": [{"type": "var", "name": "stage"}]
+ }
+ ]
+ }
+ }
+ ]
+ ]
+ , "body":
+ { "type": "RESULT"
+ , "provides":
+ { "type": "singleton_map"
+ , "key": "proto"
+ , "value": [{"type": "var", "name": "node"}]
+ }
+ }
+ }
+ }
+}
diff --git a/rules/transitions/EXPRESSIONS b/rules/transitions/EXPRESSIONS
new file mode 100644
index 00000000..8ea75500
--- /dev/null
+++ b/rules/transitions/EXPRESSIONS
@@ -0,0 +1,13 @@
+{ "for host":
+ { "vars": ["ARCH", "HOST_ARCH"]
+ , "expression":
+ { "type": "singleton_map"
+ , "key": "TARGET_ARCH"
+ , "value":
+ { "type": "var"
+ , "name": "HOST_ARCH"
+ , "default": {"type": "var", "name": "ARCH"}
+ }
+ }
+ }
+}
diff --git a/src/buildtool/TARGETS b/src/buildtool/TARGETS
new file mode 100644
index 00000000..9e26dfee
--- /dev/null
+++ b/src/buildtool/TARGETS
@@ -0,0 +1 @@
+{} \ No newline at end of file
diff --git a/src/buildtool/build_engine/analysed_target/TARGETS b/src/buildtool/build_engine/analysed_target/TARGETS
new file mode 100644
index 00000000..4884ec46
--- /dev/null
+++ b/src/buildtool/build_engine/analysed_target/TARGETS
@@ -0,0 +1,12 @@
+{ "target":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["target"]
+ , "hdrs": ["analysed_target.hpp"]
+ , "deps":
+ [ ["src/buildtool/build_engine/expression", "expression"]
+ , ["src/buildtool/common", "action_description"]
+ , ["src/buildtool/common", "tree"]
+ ]
+ , "stage": ["src", "buildtool", "build_engine", "analysed_target"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/build_engine/analysed_target/analysed_target.hpp b/src/buildtool/build_engine/analysed_target/analysed_target.hpp
new file mode 100644
index 00000000..af92b0bc
--- /dev/null
+++ b/src/buildtool/build_engine/analysed_target/analysed_target.hpp
@@ -0,0 +1,101 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILDENGINE_ANALYSED_TARGET_ANALYSED_TARGET_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILDENGINE_ANALYSED_TARGET_ANALYSED_TARGET_HPP
+
+#include <memory>
+#include <set>
+#include <string>
+#include <unordered_set>
+#include <vector>
+
+#include "src/buildtool/build_engine/expression/configuration.hpp"
+#include "src/buildtool/build_engine/expression/expression_ptr.hpp"
+#include "src/buildtool/build_engine/expression/target_result.hpp"
+#include "src/buildtool/common/action_description.hpp"
+#include "src/buildtool/common/tree.hpp"
+
+class AnalysedTarget {
+ public:
+ AnalysedTarget(TargetResult result,
+ std::vector<ActionDescription> actions,
+ std::vector<std::string> blobs,
+ std::vector<Tree> trees,
+ std::unordered_set<std::string> vars,
+ std::set<std::string> tainted)
+ : result_{std::move(result)},
+ actions_{std::move(actions)},
+ blobs_{std::move(blobs)},
+ trees_{std::move(trees)},
+ vars_{std::move(vars)},
+ tainted_{std::move(tainted)} {}
+
+ [[nodiscard]] auto Actions() const& noexcept
+ -> std::vector<ActionDescription> const& {
+ return actions_;
+ }
+ [[nodiscard]] auto Actions() && noexcept -> std::vector<ActionDescription> {
+ return std::move(actions_);
+ }
+ [[nodiscard]] auto Artifacts() const& noexcept -> ExpressionPtr const& {
+ return result_.artifact_stage;
+ }
+ [[nodiscard]] auto Artifacts() && noexcept -> ExpressionPtr {
+ return std::move(result_.artifact_stage);
+ }
+ [[nodiscard]] auto RunFiles() const& noexcept -> ExpressionPtr const& {
+ return result_.runfiles;
+ }
+ [[nodiscard]] auto RunFiles() && noexcept -> ExpressionPtr {
+ return std::move(result_.runfiles);
+ }
+ [[nodiscard]] auto Provides() const& noexcept -> ExpressionPtr const& {
+ return result_.provides;
+ }
+ [[nodiscard]] auto Provides() && noexcept -> ExpressionPtr {
+ return std::move(result_.provides);
+ }
+ [[nodiscard]] auto Blobs() const& noexcept
+ -> std::vector<std::string> const& {
+ return blobs_;
+ }
+ [[nodiscard]] auto Trees() && noexcept -> std::vector<Tree> {
+ return std::move(trees_);
+ }
+ [[nodiscard]] auto Trees() const& noexcept -> std::vector<Tree> const& {
+ return trees_;
+ }
+ [[nodiscard]] auto Blobs() && noexcept -> std::vector<std::string> {
+ return std::move(blobs_);
+ }
+ [[nodiscard]] auto Vars() const& noexcept
+ -> std::unordered_set<std::string> const& {
+ return vars_;
+ }
+ [[nodiscard]] auto Vars() && noexcept -> std::unordered_set<std::string> {
+ return std::move(vars_);
+ }
+ [[nodiscard]] auto Tainted() const& noexcept
+ -> std::set<std::string> const& {
+ return tainted_;
+ }
+ [[nodiscard]] auto Tainted() && noexcept -> std::set<std::string> {
+ return std::move(tainted_);
+ }
+ [[nodiscard]] auto Result() const& noexcept -> TargetResult const& {
+ return result_;
+ }
+ [[nodiscard]] auto Result() && noexcept -> TargetResult {
+ return std::move(result_);
+ }
+
+ private:
+ TargetResult result_;
+ std::vector<ActionDescription> actions_;
+ std::vector<std::string> blobs_;
+ std::vector<Tree> trees_;
+ std::unordered_set<std::string> vars_;
+ std::set<std::string> tainted_;
+};
+
+using AnalysedTargetPtr = std::shared_ptr<AnalysedTarget>;
+
+#endif // INCLUDED_SRC_BUILDTOOL_BUILDENGINE_ANALYSED_TARGET_ANALYSED_TARGET_HPP
diff --git a/src/buildtool/build_engine/base_maps/TARGETS b/src/buildtool/build_engine/base_maps/TARGETS
new file mode 100644
index 00000000..7ba580ca
--- /dev/null
+++ b/src/buildtool/build_engine/base_maps/TARGETS
@@ -0,0 +1,162 @@
+{ "module_name":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["module_name"]
+ , "hdrs": ["module_name.hpp"]
+ , "deps": [["src/utils/cpp", "hash_combine"]]
+ , "stage": ["src", "buildtool", "build_engine", "base_maps"]
+ }
+, "directory_map":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["directory_map"]
+ , "hdrs": ["directory_map.hpp"]
+ , "srcs": ["directory_map.cpp"]
+ , "deps":
+ [ ["src/buildtool/common", "config"]
+ , ["src/buildtool/multithreading", "async_map_consumer"]
+ , "module_name"
+ ]
+ , "stage": ["src", "buildtool", "build_engine", "base_maps"]
+ }
+, "json_file_map":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["json_file_map"]
+ , "hdrs": ["json_file_map.hpp"]
+ , "deps":
+ [ ["@", "fmt", "", "fmt"]
+ , ["@", "json", "", "json"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ , ["src/buildtool/common", "config"]
+ , ["src/buildtool/multithreading", "async_map_consumer"]
+ , "module_name"
+ ]
+ , "stage": ["src", "buildtool", "build_engine", "base_maps"]
+ }
+, "targets_file_map":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["targets_file_map"]
+ , "hdrs": ["targets_file_map.hpp"]
+ , "deps":
+ [ "json_file_map"
+ , ["@", "json", "", "json"]
+ , ["src/buildtool/multithreading", "async_map_consumer"]
+ ]
+ , "stage": ["src", "buildtool", "build_engine", "base_maps"]
+ }
+, "entity_name_data":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["entity_name_data"]
+ , "hdrs": ["entity_name_data.hpp"]
+ , "deps":
+ [ ["@", "json", "", "json"]
+ , ["src/utils/cpp", "hash_combine"]
+ , "module_name"
+ ]
+ , "stage": ["src", "buildtool", "build_engine", "base_maps"]
+ }
+, "entity_name":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["entity_name"]
+ , "hdrs": ["entity_name.hpp"]
+ , "deps":
+ [ "entity_name_data"
+ , ["@", "json", "", "json"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ , ["src/buildtool/common", "config"]
+ , ["src/buildtool/build_engine/expression", "expression"]
+ ]
+ , "stage": ["src", "buildtool", "build_engine", "base_maps"]
+ }
+, "source_map":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["source_map"]
+ , "hdrs": ["source_map.hpp"]
+ , "srcs": ["source_map.cpp"]
+ , "deps":
+ [ "directory_map"
+ , "entity_name"
+ , ["@", "json", "", "json"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ , ["src/buildtool/build_engine/analysed_target", "target"]
+ , ["src/buildtool/build_engine/expression", "expression"]
+ , ["src/buildtool/multithreading", "async_map_consumer"]
+ , ["src/utils/cpp", "json"]
+ ]
+ , "stage": ["src", "buildtool", "build_engine", "base_maps"]
+ }
+, "field_reader":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["field_reader"]
+ , "hdrs": ["field_reader.hpp"]
+ , "deps":
+ [ "entity_name"
+ , ["@", "fmt", "", "fmt"]
+ , ["@", "json", "", "json"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ , ["src/buildtool/multithreading", "async_map_consumer"]
+ , ["src/buildtool/build_engine/expression", "expression"]
+ ]
+ , "stage": ["src", "buildtool", "build_engine", "base_maps"]
+ }
+, "expression_function":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["expression_function"]
+ , "hdrs": ["expression_function.hpp"]
+ , "deps":
+ [ ["src/utils/cpp", "hash_combine"]
+ , ["src/buildtool/logging", "logging"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ ]
+ , "stage": ["src", "buildtool", "build_engine", "base_maps"]
+ }
+, "expression_map":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["expression_map"]
+ , "hdrs": ["expression_map.hpp"]
+ , "srcs": ["expression_map.cpp"]
+ , "deps":
+ [ "json_file_map"
+ , "entity_name"
+ , "expression_function"
+ , "field_reader"
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ , ["@", "fmt", "", "fmt"]
+ , ["@", "json", "", "json"]
+ , ["src/buildtool/build_engine/expression", "expression"]
+ , ["src/buildtool/multithreading", "async_map_consumer"]
+ ]
+ , "stage": ["src", "buildtool", "build_engine", "base_maps"]
+ }
+, "user_rule":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["user_rule"]
+ , "hdrs": ["user_rule.hpp"]
+ , "deps":
+ [ "entity_name"
+ , "expression_function"
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ , ["@", "fmt", "", "fmt"]
+ , ["src/buildtool/build_engine/expression", "expression"]
+ , ["src/utils/cpp", "concepts"]
+ ]
+ , "stage": ["src", "buildtool", "build_engine", "base_maps"]
+ }
+, "rule_map":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["rule_map"]
+ , "hdrs": ["rule_map.hpp"]
+ , "srcs": ["rule_map.cpp"]
+ , "deps":
+ [ "json_file_map"
+ , "entity_name"
+ , "user_rule"
+ , "field_reader"
+ , "expression_map"
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ , ["@", "fmt", "", "fmt"]
+ , ["@", "json", "", "json"]
+ , ["src/buildtool/build_engine/expression", "expression"]
+ , ["src/buildtool/multithreading", "async_map_consumer"]
+ ]
+ , "stage": ["src", "buildtool", "build_engine", "base_maps"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/build_engine/base_maps/directory_map.cpp b/src/buildtool/build_engine/base_maps/directory_map.cpp
new file mode 100644
index 00000000..1b862386
--- /dev/null
+++ b/src/buildtool/build_engine/base_maps/directory_map.cpp
@@ -0,0 +1,35 @@
+#include "src/buildtool/build_engine/base_maps/directory_map.hpp"
+
+#include <filesystem>
+#include <unordered_set>
+
+#include "src/buildtool/common/repository_config.hpp"
+#include "src/buildtool/multithreading/async_map_consumer.hpp"
+
+auto BuildMaps::Base::CreateDirectoryEntriesMap(std::size_t jobs)
+ -> DirectoryEntriesMap {
+ auto directory_reader = [](auto /* unused*/,
+ auto setter,
+ auto logger,
+ auto /* unused */,
+ auto const& key) {
+ auto const* ws_root =
+ RepositoryConfig::Instance().WorkspaceRoot(key.repository);
+ if (ws_root == nullptr) {
+ (*logger)(
+ fmt::format("Cannot determine workspace root for repository {}",
+ key.repository),
+ true);
+ return;
+ }
+ if (not ws_root->IsDirectory(key.module)) {
+ // Missing directory is fine (source tree might be incomplete),
+ // contains no entries.
+ (*setter)(FileRoot::DirectoryEntries{});
+ return;
+ }
+ (*setter)(ws_root->ReadDirectory(key.module));
+ };
+ return AsyncMapConsumer<BuildMaps::Base::ModuleName,
+ FileRoot::DirectoryEntries>{directory_reader, jobs};
+}
diff --git a/src/buildtool/build_engine/base_maps/directory_map.hpp b/src/buildtool/build_engine/base_maps/directory_map.hpp
new file mode 100644
index 00000000..fb675997
--- /dev/null
+++ b/src/buildtool/build_engine/base_maps/directory_map.hpp
@@ -0,0 +1,22 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_DIRECTORY_MAP_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_DIRECTORY_MAP_HPP
+
+#include <filesystem>
+#include <map>
+#include <unordered_set>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/build_engine/base_maps/module_name.hpp"
+#include "src/buildtool/file_system/file_root.hpp"
+#include "src/buildtool/multithreading/async_map_consumer.hpp"
+
+namespace BuildMaps::Base {
+
+using DirectoryEntriesMap =
+ AsyncMapConsumer<ModuleName, FileRoot::DirectoryEntries>;
+
+auto CreateDirectoryEntriesMap(std::size_t jobs = 0) -> DirectoryEntriesMap;
+
+} // namespace BuildMaps::Base
+
+#endif
diff --git a/src/buildtool/build_engine/base_maps/entity_name.hpp b/src/buildtool/build_engine/base_maps/entity_name.hpp
new file mode 100644
index 00000000..12fbd6ee
--- /dev/null
+++ b/src/buildtool/build_engine/base_maps/entity_name.hpp
@@ -0,0 +1,206 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_ENTITY_NAME_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_ENTITY_NAME_HPP
+
+#include <filesystem>
+#include <optional>
+#include <utility>
+
+#include "nlohmann/json.hpp"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/build_engine/base_maps/entity_name_data.hpp"
+#include "src/buildtool/build_engine/expression/expression.hpp"
+#include "src/buildtool/common/repository_config.hpp"
+#include "src/utils/cpp/hash_combine.hpp"
+
+namespace BuildMaps::Base {
+
+[[nodiscard]] inline auto ParseEntityNameFromJson(
+ nlohmann::json const& json,
+ EntityName const& current,
+ std::optional<std::function<void(std::string const&)>> logger =
+ std::nullopt) noexcept -> std::optional<EntityName> {
+ try {
+ if (json.is_string()) {
+ return EntityName{current.repository,
+ current.module,
+ json.template get<std::string>()};
+ }
+ if (json.is_array() and json.size() == 2 and json[0].is_string() and
+ json[1].is_string()) {
+ return EntityName{current.repository,
+ json[0].template get<std::string>(),
+ json[1].template get<std::string>()};
+ }
+ if (json.is_array() and json.size() == 3 and json[0].is_string() and
+ json[0].template get<std::string>() ==
+ EntityName::kFileLocationMarker and
+ json[2].is_string()) {
+ auto name = json[2].template get<std::string>();
+ if (json[1].is_null()) {
+ return EntityName{
+ current.repository, current.module, name, true};
+ }
+ if (json[1].is_string()) {
+ auto middle = json[1].template get<std::string>();
+ if (middle == "." or middle == current.module) {
+ return EntityName{
+ current.repository, current.module, name, true};
+ }
+ }
+ if (logger) {
+ (*logger)(
+ fmt::format("Invalid module name {} for file reference",
+ json[1].dump()));
+ }
+ }
+ else if (json.is_array() and json.size() == 3 and
+ json[0].is_string() and
+ json[0].template get<std::string>() ==
+ EntityName::kRelativeLocationMarker and
+ json[1].is_string() and json[2].is_string()) {
+ auto relmodule = json[1].template get<std::string>();
+ auto name = json[2].template get<std::string>();
+
+ std::filesystem::path m{current.module};
+ auto module = (m / relmodule).lexically_normal().string();
+ if (module.compare(0, 3, "../") != 0) {
+ return EntityName{current.repository, module, name};
+ }
+ if (logger) {
+ (*logger)(fmt::format(
+ "Relative module name {} is outside of workspace",
+ relmodule));
+ }
+ }
+ else if (json.is_array() and json.size() == 3 and
+ json[0].is_string() and
+ json[0].template get<std::string>() ==
+ EntityName::kAnonymousMarker) {
+ if (logger) {
+ (*logger)(fmt::format(
+ "Parsing anonymous target from JSON is not supported."));
+ }
+ }
+ else if (json.is_array() and json.size() == 4 and
+ json[0].is_string() and
+ json[0].template get<std::string>() ==
+ EntityName::kLocationMarker and
+ json[1].is_string() and json[2].is_string() and
+ json[3].is_string()) {
+ auto local_repo_name = json[1].template get<std::string>();
+ auto module = json[2].template get<std::string>();
+ auto target = json[3].template get<std::string>();
+ auto const* repo_name = RepositoryConfig::Instance().GlobalName(
+ current.repository, local_repo_name);
+ if (repo_name != nullptr) {
+ return EntityName{*repo_name, module, target};
+ }
+ if (logger) {
+ (*logger)(fmt::format("Cannot resolve repository name {}",
+ local_repo_name));
+ }
+ }
+ else if (logger) {
+ (*logger)(fmt::format("Syntactically invalid entity name: {}.",
+ json.dump()));
+ }
+ } catch (...) {
+ }
+ return std::nullopt;
+}
+
+[[nodiscard]] inline auto ParseEntityNameFromExpression(
+ ExpressionPtr const& expr,
+ EntityName const& current,
+ std::optional<std::function<void(std::string const&)>> logger =
+ std::nullopt) noexcept -> std::optional<EntityName> {
+ try {
+ if (expr) {
+ if (expr->IsString()) {
+ return EntityName{current.repository,
+ current.module,
+ expr->Value<std::string>()->get()};
+ }
+ if (expr->IsList()) {
+ auto const& list = expr->Value<Expression::list_t>()->get();
+ if (list.size() == 2 and list[0]->IsString() and
+ list[1]->IsString()) {
+ return EntityName{current.repository,
+ list[0]->Value<std::string>()->get(),
+ list[1]->Value<std::string>()->get()};
+ }
+ if (list.size() == 3 and list[0]->IsString() and
+ list[0]->String() == EntityName::kFileLocationMarker and
+ list[2]->IsString()) {
+ auto name = list[2]->Value<std::string>()->get();
+ if (list[1]->IsNone()) {
+ return EntityName{
+ current.repository, current.module, name, true};
+ }
+ if (list[1]->IsString() and
+ (list[1]->String() == "." or
+ list[1]->String() == current.module)) {
+ return EntityName{
+ current.repository, current.module, name, true};
+ }
+ if (logger) {
+ (*logger)(fmt::format(
+ "Invalid module name {} for file reference",
+ list[1]->ToString()));
+ }
+ }
+ else if (list.size() == 3 and list[0]->IsString() and
+ list[0]->String() ==
+ EntityName::kRelativeLocationMarker and
+ list[1]->IsString() and list[2]->IsString()) {
+ std::filesystem::path m{current.module};
+ auto module =
+ (m / (list[1]->String())).lexically_normal().string();
+ if (module.compare(0, 3, "../") != 0) {
+ return EntityName{
+ current.repository, module, list[2]->String()};
+ }
+ if (logger) {
+ (*logger)(fmt::format(
+ "Relative module name {} is outside of workspace",
+ list[1]->String()));
+ }
+ }
+ else if (list.size() == 3 and list[0]->IsString() and
+ list[0]->String() ==
+ EntityName::kRelativeLocationMarker and
+ list[1]->IsMap() and list[2]->IsNode()) {
+ return EntityName{AnonymousTarget{list[1], list[2]}};
+ }
+ else if (list.size() == 4 and list[0]->IsString() and
+ list[0]->String() == EntityName::kLocationMarker and
+ list[1]->IsString() and list[2]->IsString() and
+ list[3]->IsString()) {
+ auto const* repo_name =
+ RepositoryConfig::Instance().GlobalName(
+ current.repository, list[1]->String());
+ if (repo_name != nullptr) {
+ return EntityName{
+ *repo_name, list[2]->String(), list[3]->String()};
+ }
+ if (logger) {
+ (*logger)(
+ fmt::format("Cannot resolve repository name {}",
+ list[1]->String()));
+ }
+ }
+ else if (logger) {
+ (*logger)(
+ fmt::format("Syntactically invalid entity name: {}.",
+ expr->ToString()));
+ }
+ }
+ }
+ } catch (...) {
+ }
+ return std::nullopt;
+}
+
+} // namespace BuildMaps::Base
+
+#endif
diff --git a/src/buildtool/build_engine/base_maps/entity_name_data.hpp b/src/buildtool/build_engine/base_maps/entity_name_data.hpp
new file mode 100644
index 00000000..217ccfb5
--- /dev/null
+++ b/src/buildtool/build_engine/base_maps/entity_name_data.hpp
@@ -0,0 +1,129 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_ENTITY_NAME_DATA_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_ENTITY_NAME_DATA_HPP
+
+#include <filesystem>
+#include <optional>
+#include <utility>
+
+#include "nlohmann/json.hpp"
+#include "src/buildtool/build_engine/base_maps/module_name.hpp"
+#include "src/buildtool/build_engine/expression/expression_ptr.hpp"
+#include "src/utils/cpp/hash_combine.hpp"
+#include "src/utils/cpp/hex_string.hpp"
+
+namespace BuildMaps::Base {
+
+struct AnonymousTarget {
+ ExpressionPtr rule_map;
+ ExpressionPtr target_node;
+
+ [[nodiscard]] auto operator==(AnonymousTarget const& other) const noexcept
+ -> bool {
+ return rule_map == other.rule_map && target_node == other.target_node;
+ }
+};
+
+struct EntityName {
+ static constexpr auto kLocationMarker = "@";
+ static constexpr auto kFileLocationMarker = "FILE";
+ static constexpr auto kRelativeLocationMarker = "./";
+ static constexpr auto kAnonymousMarker = "#";
+
+ std::string repository{};
+ std::string module{};
+ std::string name{};
+ std::optional<AnonymousTarget> anonymous{};
+ bool explicit_file_reference{};
+
+ EntityName() = default;
+ EntityName(std::string repository,
+ const std::string& module,
+ std::string name)
+ : repository{std::move(repository)},
+ module{normal_module_name(module)},
+ name{std::move(name)} {}
+ explicit EntityName(AnonymousTarget anonymous)
+ : anonymous{std::move(anonymous)} {}
+
+ static auto normal_module_name(const std::string& module) -> std::string {
+ return std::filesystem::path("/" + module + "/")
+ .lexically_normal()
+ .lexically_relative("/")
+ .parent_path()
+ .string();
+ }
+
+ [[nodiscard]] auto operator==(
+ BuildMaps::Base::EntityName const& other) const noexcept -> bool {
+ return module == other.module && name == other.name &&
+ repository == other.repository && anonymous == other.anonymous &&
+ explicit_file_reference == other.explicit_file_reference;
+ }
+
+ [[nodiscard]] auto ToJson() const -> nlohmann::json {
+ nlohmann::json j;
+ if (IsAnonymousTarget()) {
+ j.push_back(kAnonymousMarker);
+ j.push_back(anonymous->rule_map.ToIdentifier());
+ j.push_back(anonymous->target_node.ToIdentifier());
+ }
+ else {
+ j.push_back(kLocationMarker);
+ j.push_back(repository);
+ if (explicit_file_reference) {
+ j.push_back(kFileLocationMarker);
+ }
+ j.push_back(module);
+ j.push_back(name);
+ }
+ return j;
+ }
+
+ [[nodiscard]] auto ToString() const -> std::string {
+ return ToJson().dump();
+ }
+
+ [[nodiscard]] auto ToModule() const -> ModuleName {
+ return ModuleName{repository, module};
+ }
+
+ [[nodiscard]] auto IsDefinitionName() const -> bool {
+ return (not explicit_file_reference);
+ }
+
+ [[nodiscard]] auto IsAnonymousTarget() const -> bool {
+ return static_cast<bool>(anonymous);
+ }
+
+ EntityName(std::string repository,
+ const std::string& module,
+ std::string name,
+ bool explicit_file_reference)
+ : repository{std::move(repository)},
+ module{normal_module_name(module)},
+ name{std::move(name)},
+ explicit_file_reference{explicit_file_reference} {}
+};
+} // namespace BuildMaps::Base
+
+namespace std {
+template <>
+struct hash<BuildMaps::Base::EntityName> {
+ [[nodiscard]] auto operator()(
+ const BuildMaps::Base::EntityName& t) const noexcept -> std::size_t {
+ size_t seed{};
+ hash_combine<std::string>(&seed, t.repository);
+ hash_combine<std::string>(&seed, t.module);
+ hash_combine<std::string>(&seed, t.name);
+ auto anonymous =
+ t.anonymous.value_or(BuildMaps::Base::AnonymousTarget{});
+ hash_combine<ExpressionPtr>(&seed, anonymous.rule_map);
+ hash_combine<ExpressionPtr>(&seed, anonymous.target_node);
+ hash_combine<bool>(&seed, t.explicit_file_reference);
+ return seed;
+ }
+};
+
+} // namespace std
+
+#endif
diff --git a/src/buildtool/build_engine/base_maps/expression_function.hpp b/src/buildtool/build_engine/base_maps/expression_function.hpp
new file mode 100644
index 00000000..0eb07a88
--- /dev/null
+++ b/src/buildtool/build_engine/base_maps/expression_function.hpp
@@ -0,0 +1,103 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_EXPRESSION_FUNCTION_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_EXPRESSION_FUNCTION_HPP
+
+#include <memory>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include "fmt/core.h"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/build_engine/expression/configuration.hpp"
+#include "src/buildtool/build_engine/expression/evaluator.hpp"
+#include "src/buildtool/build_engine/expression/expression.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+namespace BuildMaps::Base {
+
+class ExpressionFunction {
+ public:
+ using Ptr = std::shared_ptr<ExpressionFunction>;
+ using imports_t = std::unordered_map<std::string, gsl::not_null<Ptr>>;
+
+ ExpressionFunction(std::vector<std::string> vars,
+ imports_t imports,
+ ExpressionPtr expr) noexcept
+ : vars_{std::move(vars)},
+ imports_{std::move(imports)},
+ expr_{std::move(expr)} {}
+
+ [[nodiscard]] auto Evaluate(
+ Configuration const& env,
+ FunctionMapPtr const& functions,
+ std::function<void(std::string const&)> const& logger =
+ [](std::string const& error) noexcept -> void {
+ Logger::Log(LogLevel::Error, error);
+ },
+ std::function<void(void)> const& note_user_context =
+ []() noexcept -> void {}) const noexcept -> ExpressionPtr {
+ try { // try-catch to silence clang-tidy's bugprone-exception-escape,
+ // only imports_caller can throw but it is not called here.
+ auto imports_caller = [this, &functions](
+ SubExprEvaluator&& /*eval*/,
+ ExpressionPtr const& expr,
+ Configuration const& env) {
+ auto name_expr = expr["name"];
+ auto const& name = name_expr->String();
+ auto it = imports_.find(name);
+ if (it != imports_.end()) {
+ std::stringstream ss{};
+ bool user_context = false;
+ auto result = it->second->Evaluate(
+ env,
+ functions,
+ [&ss](auto const& msg) { ss << msg; },
+ [&user_context]() { user_context = true; }
+
+ );
+ if (result) {
+ return result;
+ }
+ if (user_context) {
+ throw Evaluator::EvaluationError(ss.str(), true, true);
+ }
+ throw Evaluator::EvaluationError(
+ fmt::format(
+ "This call to {} failed in the following way:\n{}",
+ name_expr->ToString(),
+ ss.str()),
+ true);
+ }
+ throw Evaluator::EvaluationError(
+ fmt::format("Unknown expression '{}'.", name));
+ };
+ auto newenv = env.Prune(vars_);
+ return expr_.Evaluate(
+ newenv,
+ FunctionMap::MakePtr(
+ functions, "CALL_EXPRESSION", imports_caller),
+ logger,
+ note_user_context);
+ } catch (...) {
+ gsl_EnsuresAudit(false); // ensure that the try-block never throws
+ return ExpressionPtr{nullptr};
+ }
+ }
+
+ inline static Ptr const kEmptyTransition =
+ std::make_shared<ExpressionFunction>(
+ std::vector<std::string>{},
+ ExpressionFunction::imports_t{},
+ Expression::FromJson(R"([{"type": "empty_map"}])"_json));
+
+ private:
+ std::vector<std::string> vars_{};
+ imports_t imports_{};
+ ExpressionPtr expr_{};
+};
+
+using ExpressionFunctionPtr = ExpressionFunction::Ptr;
+
+} // namespace BuildMaps::Base
+
+#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_EXPRESSION_FUNCTION_HPP
diff --git a/src/buildtool/build_engine/base_maps/expression_map.cpp b/src/buildtool/build_engine/base_maps/expression_map.cpp
new file mode 100644
index 00000000..df464b15
--- /dev/null
+++ b/src/buildtool/build_engine/base_maps/expression_map.cpp
@@ -0,0 +1,90 @@
+
+#include "src/buildtool/build_engine/base_maps/expression_map.hpp"
+
+#include <optional>
+#include <string>
+
+#include "fmt/core.h"
+#include "src/buildtool/build_engine/base_maps/field_reader.hpp"
+
+namespace BuildMaps::Base {
+
+auto CreateExpressionMap(gsl::not_null<ExpressionFileMap*> const& expr_file_map,
+ std::size_t jobs) -> ExpressionFunctionMap {
+ auto expr_func_creator = [expr_file_map](auto ts,
+ auto setter,
+ auto logger,
+ auto subcaller,
+ auto const& id) {
+ if (not id.IsDefinitionName()) {
+ (*logger)(
+ fmt::format("{} cannot name an expression", id.ToString()),
+ true);
+ return;
+ }
+ expr_file_map->ConsumeAfterKeysReady(
+ ts,
+ {id.ToModule()},
+ [setter = std::move(setter),
+ logger,
+ subcaller = std::move(subcaller),
+ id](auto json_values) {
+ auto func_it = json_values[0]->find(id.name);
+ if (func_it == json_values[0]->end()) {
+ (*logger)(fmt::format("Cannot find expression {} in {}",
+ id.name,
+ id.module),
+ true);
+ return;
+ }
+
+ auto reader = FieldReader::Create(
+ func_it.value(), id, "expression", logger);
+ if (not reader) {
+ return;
+ }
+
+ auto expr = reader->ReadExpression("expression");
+ if (not expr) {
+ return;
+ }
+
+ auto vars = reader->ReadStringList("vars");
+ if (not vars) {
+ return;
+ }
+
+ auto import_aliases =
+ reader->ReadEntityAliasesObject("imports");
+ if (not import_aliases) {
+ return;
+ }
+ auto [names, ids] = std::move(*import_aliases).Obtain();
+
+ (*subcaller)(
+ std::move(ids),
+ [setter = std::move(setter),
+ vars = std::move(*vars),
+ names = std::move(names),
+ expr = std::move(expr)](auto const& expr_funcs) {
+ auto imports = ExpressionFunction::imports_t{};
+ imports.reserve(expr_funcs.size());
+ for (std::size_t i{}; i < expr_funcs.size(); ++i) {
+ imports.emplace(names[i], *expr_funcs[i]);
+ }
+ (*setter)(std::make_shared<ExpressionFunction>(
+ vars, imports, expr));
+ },
+ std::move(logger));
+ },
+ [logger, id](auto msg, auto fatal) {
+ (*logger)(fmt::format("While reading expression file in {}: {}",
+ id.module,
+ msg),
+ fatal);
+ });
+ };
+ return ExpressionFunctionMap{expr_func_creator, jobs};
+}
+
+} // namespace BuildMaps::Base
diff --git a/src/buildtool/build_engine/base_maps/expression_map.hpp b/src/buildtool/build_engine/base_maps/expression_map.hpp
new file mode 100644
index 00000000..21ea8ec8
--- /dev/null
+++ b/src/buildtool/build_engine/base_maps/expression_map.hpp
@@ -0,0 +1,32 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_EXPRESSION_MAP_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_EXPRESSION_MAP_HPP
+
+#include <memory>
+#include <string>
+
+#include "nlohmann/json.hpp"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/build_engine/base_maps/entity_name.hpp"
+#include "src/buildtool/build_engine/base_maps/expression_function.hpp"
+#include "src/buildtool/build_engine/base_maps/json_file_map.hpp"
+#include "src/buildtool/build_engine/base_maps/module_name.hpp"
+#include "src/buildtool/multithreading/async_map_consumer.hpp"
+
+namespace BuildMaps::Base {
+
+using ExpressionFileMap = AsyncMapConsumer<ModuleName, nlohmann::json>;
+
+constexpr auto CreateExpressionFileMap =
+ CreateJsonFileMap<&RepositoryConfig::ExpressionRoot,
+ &RepositoryConfig::ExpressionFileName,
+ /*kMandatory=*/true>;
+
+using ExpressionFunctionMap =
+ AsyncMapConsumer<EntityName, ExpressionFunctionPtr>;
+
+auto CreateExpressionMap(gsl::not_null<ExpressionFileMap*> const& expr_file_map,
+ std::size_t jobs = 0) -> ExpressionFunctionMap;
+
+} // namespace BuildMaps::Base
+
+#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_EXPRESSION_MAP_HPP
diff --git a/src/buildtool/build_engine/base_maps/field_reader.hpp b/src/buildtool/build_engine/base_maps/field_reader.hpp
new file mode 100644
index 00000000..30e0fe00
--- /dev/null
+++ b/src/buildtool/build_engine/base_maps/field_reader.hpp
@@ -0,0 +1,231 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_FIELD_READER_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_FIELD_READER_HPP
+
+#include <memory>
+#include <optional>
+#include <string>
+#include <vector>
+
+#include "fmt/core.h"
+#include "nlohmann/json.hpp"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/build_engine/base_maps/entity_name.hpp"
+#include "src/buildtool/build_engine/expression/expression.hpp"
+#include "src/buildtool/multithreading/async_map_consumer.hpp"
+
+namespace BuildMaps::Base {
+
+[[nodiscard]] static inline auto GetOrDefault(nlohmann::json const& json,
+ std::string const& key,
+ nlohmann::json&& default_value)
+ -> nlohmann::json {
+ auto value = json.find(key);
+ if (value != json.end()) {
+ return value.value();
+ }
+ return std::move(default_value);
+}
+
+class FieldReader {
+ public:
+ using Ptr = std::shared_ptr<FieldReader>;
+
+ class EntityAliases {
+ public:
+ [[nodiscard]] auto Obtain() && -> std::pair<std::vector<std::string>,
+ std::vector<EntityName>> {
+ return std::make_pair(std::move(names_), std::move(ids_));
+ }
+ auto reserve(std::size_t size) -> void {
+ names_.reserve(size);
+ ids_.reserve(size);
+ }
+ template <class T_Name, class T_Id>
+ auto emplace_back(T_Name&& name, T_Id&& id) -> void {
+ names_.emplace_back(std::forward<T_Name>(name));
+ ids_.emplace_back(std::forward<T_Id>(id));
+ }
+
+ private:
+ std::vector<std::string> names_;
+ std::vector<EntityName> ids_;
+ };
+
+ [[nodiscard]] static auto Create(nlohmann::json const& json,
+ EntityName const& id,
+ std::string const& entity_type,
+ AsyncMapConsumerLoggerPtr const& logger)
+ -> std::optional<FieldReader> {
+ if (not json.is_object()) {
+ (*logger)(
+ fmt::format(
+ "{} definition {} is not an object.", entity_type, id.name),
+ true);
+ return std::nullopt;
+ }
+ return FieldReader(json, id, entity_type, logger);
+ }
+
+ [[nodiscard]] static auto CreatePtr(nlohmann::json const& json,
+ EntityName const& id,
+ std::string const& entity_type,
+ AsyncMapConsumerLoggerPtr const& logger)
+ -> Ptr {
+ if (not json.is_object()) {
+ (*logger)(
+ fmt::format(
+ "{} definition {} is not an object.", entity_type, id.name),
+ true);
+ return nullptr;
+ }
+ return std::make_shared<FieldReader>(json, id, entity_type, logger);
+ }
+
+ [[nodiscard]] auto ReadExpression(std::string const& field_name) const
+ -> ExpressionPtr {
+ auto expr_it = json_.find(field_name);
+ if (expr_it == json_.end()) {
+ (*logger_)(fmt::format("Missing mandatory field {} in {} {}.",
+ field_name,
+ entity_type_,
+ id_.name),
+ true);
+ return ExpressionPtr{nullptr};
+ }
+
+ auto expr = Expression::FromJson(expr_it.value());
+ if (not expr) {
+ (*logger_)(
+ fmt::format("Failed to create expression from JSON:\n {}",
+ json_.dump()),
+ true);
+ }
+ return expr;
+ }
+
+ [[nodiscard]] auto ReadOptionalExpression(
+ std::string const& field_name,
+ ExpressionPtr const& default_value) const -> ExpressionPtr {
+ auto expr_it = json_.find(field_name);
+ if (expr_it == json_.end()) {
+ return default_value;
+ }
+
+ auto expr = Expression::FromJson(expr_it.value());
+ if (not expr) {
+ (*logger_)(
+ fmt::format("Failed to create expression from JSON:\n {}",
+ json_.dump()),
+ true);
+ }
+ return expr;
+ }
+
+ [[nodiscard]] auto ReadStringList(std::string const& field_name) const
+ -> std::optional<std::vector<std::string>> {
+ auto const& list =
+ GetOrDefault(json_, field_name, nlohmann::json::array());
+ if (not list.is_array()) {
+ (*logger_)(fmt::format("Field {} in {} {} is not a list",
+ field_name,
+ entity_type_,
+ id_.name),
+ true);
+ return std::nullopt;
+ }
+
+ auto vars = std::vector<std::string>{};
+ vars.reserve(list.size());
+
+ try {
+ std::transform(
+ list.begin(),
+ list.end(),
+ std::back_inserter(vars),
+ [](auto const& j) { return j.template get<std::string>(); });
+ } catch (...) {
+ (*logger_)(fmt::format("List entry in {} of {} {} is not a string",
+ field_name,
+ entity_type_,
+ id_.name),
+ true);
+ return std::nullopt;
+ }
+
+ return vars;
+ }
+
+ [[nodiscard]] auto ReadEntityAliasesObject(
+ std::string const& field_name) const -> std::optional<EntityAliases> {
+ auto const& map =
+ GetOrDefault(json_, field_name, nlohmann::json::object());
+ if (not map.is_object()) {
+ (*logger_)(fmt::format("Field {} in {} {} is not an object",
+ field_name,
+ entity_type_,
+ id_.name),
+ true);
+ return std::nullopt;
+ }
+
+ auto imports = EntityAliases{};
+ imports.reserve(map.size());
+
+ for (auto const& [key, val] : map.items()) {
+ auto expr_id = ParseEntityNameFromJson(
+ val,
+ id_,
+ [this, &field_name, entry = val.dump()](
+ std::string const& parse_err) {
+ (*logger_)(fmt::format("Parsing entry {} in field {} of {} "
+ "{} failed with:\n{}",
+ entry,
+ field_name,
+ entity_type_,
+ id_.name,
+ parse_err),
+ true);
+ });
+ if (not expr_id) {
+ return std::nullopt;
+ }
+ imports.emplace_back(key, *expr_id);
+ }
+ return imports;
+ }
+
+ void ExpectFields(std::unordered_set<std::string> const& expected) {
+ auto unexpected = nlohmann::json::array();
+ for (auto const& [key, value] : json_.items()) {
+ if (not expected.contains(key)) {
+ unexpected.push_back(key);
+ }
+ }
+ if (not unexpected.empty()) {
+ (*logger_)(fmt::format("{} {} has unexpected parameters {}",
+ entity_type_,
+ id_.ToString(),
+ unexpected.dump()),
+ false);
+ }
+ }
+
+ FieldReader(nlohmann::json json,
+ EntityName id,
+ std::string entity_type,
+ AsyncMapConsumerLoggerPtr logger) noexcept
+ : json_{std::move(json)},
+ id_{std::move(id)},
+ entity_type_{std::move(entity_type)},
+ logger_{std::move(logger)} {}
+
+ private:
+ nlohmann::json json_;
+ EntityName id_;
+ std::string entity_type_;
+ AsyncMapConsumerLoggerPtr logger_;
+};
+
+} // namespace BuildMaps::Base
+
+#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_FIELD_READER_HPP
diff --git a/src/buildtool/build_engine/base_maps/json_file_map.hpp b/src/buildtool/build_engine/base_maps/json_file_map.hpp
new file mode 100644
index 00000000..bf7495d7
--- /dev/null
+++ b/src/buildtool/build_engine/base_maps/json_file_map.hpp
@@ -0,0 +1,93 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_JSON_FILE_MAP_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_JSON_FILE_MAP_HPP
+
+#include <filesystem>
+#include <fstream>
+#include <string>
+
+#include "fmt/core.h"
+#include "nlohmann/json.hpp"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/build_engine/base_maps/module_name.hpp"
+#include "src/buildtool/common/repository_config.hpp"
+#include "src/buildtool/multithreading/async_map_consumer.hpp"
+
+namespace BuildMaps::Base {
+
+using JsonFileMap = AsyncMapConsumer<ModuleName, nlohmann::json>;
+
+// function pointer type for specifying which root to get from global config
+using RootGetter = auto (RepositoryConfig::*)(std::string const&) const
+ -> FileRoot const*;
+
+// function pointer type for specifying the file name from the global config
+using FileNameGetter = auto (RepositoryConfig::*)(std::string const&) const
+ -> std::string const*;
+
+template <RootGetter get_root, FileNameGetter get_name, bool kMandatory = true>
+auto CreateJsonFileMap(std::size_t jobs) -> JsonFileMap {
+ auto json_file_reader = [](auto /* unused */,
+ auto setter,
+ auto logger,
+ auto /* unused */,
+ auto const& key) {
+ auto const& config = RepositoryConfig::Instance();
+ auto const* root = (config.*get_root)(key.repository);
+ auto const* json_file_name = (config.*get_name)(key.repository);
+ if (root == nullptr or json_file_name == nullptr) {
+ (*logger)(fmt::format("Cannot determine root or JSON file name for "
+ "repository {}.",
+ key.repository),
+ true);
+ return;
+ }
+ auto module = std::filesystem::path{key.module}.lexically_normal();
+ if (module.is_absolute() or *module.begin() == "..") {
+ (*logger)(fmt::format("Modules have to live inside their "
+ "repository, but found {}.",
+ key.module),
+ true);
+ return;
+ }
+ auto json_file_path = module / *json_file_name;
+
+ if (not root->IsFile(json_file_path)) {
+ if constexpr (kMandatory) {
+ (*logger)(fmt::format("JSON file {} does not exist.",
+ json_file_path.string()),
+ true);
+ }
+ else {
+ (*setter)(nlohmann::json::object());
+ }
+ return;
+ }
+
+ auto const file_content = root->ReadFile(json_file_path);
+ if (not file_content) {
+ (*logger)(fmt::format("cannot read JSON file {}.",
+ json_file_path.string()),
+ true);
+ return;
+ }
+ auto json = nlohmann::json::parse(*file_content, nullptr, false);
+ if (json.is_discarded()) {
+ (*logger)(fmt::format("JSON file {} does not contain valid JSON.",
+ json_file_path.string()),
+ true);
+ return;
+ }
+ if (!json.is_object()) {
+ (*logger)(fmt::format("JSON in {} is not an object.",
+ json_file_path.string()),
+ true);
+ return;
+ }
+ (*setter)(std::move(json));
+ };
+ return AsyncMapConsumer<ModuleName, nlohmann::json>{json_file_reader, jobs};
+}
+
+} // namespace BuildMaps::Base
+
+#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_JSON_FILE_MAP_HPP
diff --git a/src/buildtool/build_engine/base_maps/module_name.hpp b/src/buildtool/build_engine/base_maps/module_name.hpp
new file mode 100644
index 00000000..26465bf6
--- /dev/null
+++ b/src/buildtool/build_engine/base_maps/module_name.hpp
@@ -0,0 +1,36 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_MODULE_NAME_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_MODULE_NAME_HPP
+
+#include "src/utils/cpp/hash_combine.hpp"
+
+namespace BuildMaps::Base {
+
+struct ModuleName {
+ std::string repository{};
+ std::string module{};
+
+ ModuleName(std::string repository, std::string module)
+ : repository{std::move(repository)}, module{std::move(module)} {}
+
+ [[nodiscard]] auto operator==(ModuleName const& other) const noexcept
+ -> bool {
+ return module == other.module && repository == other.repository;
+ }
+};
+} // namespace BuildMaps::Base
+
+namespace std {
+template <>
+struct hash<BuildMaps::Base::ModuleName> {
+ [[nodiscard]] auto operator()(
+ const BuildMaps::Base::ModuleName& t) const noexcept -> std::size_t {
+ size_t seed{};
+ hash_combine<std::string>(&seed, t.repository);
+ hash_combine<std::string>(&seed, t.module);
+ return seed;
+ }
+};
+
+} // namespace std
+
+#endif
diff --git a/src/buildtool/build_engine/base_maps/rule_map.cpp b/src/buildtool/build_engine/base_maps/rule_map.cpp
new file mode 100644
index 00000000..7bdc14e4
--- /dev/null
+++ b/src/buildtool/build_engine/base_maps/rule_map.cpp
@@ -0,0 +1,371 @@
+
+#include "src/buildtool/build_engine/base_maps/rule_map.hpp"
+
+#include <optional>
+#include <string>
+#include <unordered_set>
+
+#include "fmt/core.h"
+#include "src/buildtool/build_engine/base_maps/field_reader.hpp"
+
+namespace BuildMaps::Base {
+
+namespace {
+
+auto rule_fields = std::unordered_set<std::string>{"anonymous",
+ "config_doc",
+ "config_fields",
+ "config_transitions",
+ "config_vars",
+ "doc",
+ "expression",
+ "field_doc",
+ "implicit",
+ "imports",
+ "import",
+ "string_fields",
+ "tainted",
+ "target_fields"};
+
+[[nodiscard]] auto ReadAnonymousObject(EntityName const& id,
+ nlohmann::json const& json,
+ AsyncMapConsumerLoggerPtr const& logger)
+ -> std::optional<UserRule::anonymous_defs_t> {
+ auto obj = GetOrDefault(json, "anonymous", nlohmann::json::object());
+ if (not obj.is_object()) {
+ (*logger)(
+ fmt::format("Field anonymous in rule {} is not an object", id.name),
+ true);
+ return std::nullopt;
+ }
+
+ UserRule::anonymous_defs_t anon_defs{};
+ anon_defs.reserve(obj.size());
+ for (auto const& [name, def] : obj.items()) {
+ if (not def.is_object()) {
+ (*logger)(fmt::format("Entry {} in field anonymous in rule {} is "
+ "not an object",
+ name,
+ id.name),
+ true);
+ return std::nullopt;
+ }
+
+ auto target = def.find("target");
+ if (target == def.end()) {
+ (*logger)(fmt::format("Entry target for {} in field anonymous in "
+ "rule {} is missing",
+ name,
+ id.name),
+ true);
+ return std::nullopt;
+ }
+ if (not target->is_string()) {
+ (*logger)(fmt::format("Entry target for {} in field anonymous in "
+ "rule {} is not a string",
+ name,
+ id.name),
+ true);
+ return std::nullopt;
+ }
+
+ auto provider = def.find("provider");
+ if (provider == def.end()) {
+ (*logger)(fmt::format("Entry provider for {} in field anonymous in "
+ "rule {} is missing",
+ name,
+ id.name),
+ true);
+ return std::nullopt;
+ }
+ if (not provider->is_string()) {
+ (*logger)(fmt::format("Entry provider for {} in field anonymous in "
+ "rule {} is not a string",
+ name,
+ id.name),
+ true);
+ return std::nullopt;
+ }
+
+ auto rule_map = def.find("rule_map");
+ if (rule_map == def.end()) {
+ (*logger)(fmt::format("Entry rule_map for {} in field anonymous in "
+ "rule {} is missing",
+ name,
+ id.name),
+ true);
+ return std::nullopt;
+ }
+ if (not rule_map->is_object()) {
+ (*logger)(fmt::format("Entry rule_map for {} in field anonymous in "
+ "rule {} is not an object",
+ name,
+ id.name),
+ true);
+ return std::nullopt;
+ }
+
+ Expression::map_t::underlying_map_t rule_mapping{};
+ for (auto const& [key, val] : rule_map->items()) {
+ auto rule_name = ParseEntityNameFromJson(
+ val, id, [&logger, &id, &name = name](auto msg) {
+ (*logger)(
+ fmt::format("Parsing rule name for entry {} in field "
+ "anonymous in rule {} failed with:\n{}",
+ name,
+ id.name,
+ msg),
+ true);
+ });
+ if (not rule_name) {
+ return std::nullopt;
+ }
+ rule_mapping.emplace(key, ExpressionPtr{std::move(*rule_name)});
+ }
+
+ anon_defs.emplace(
+ name,
+ UserRule::AnonymousDefinition{
+ target->get<std::string>(),
+ provider->get<std::string>(),
+ ExpressionPtr{Expression::map_t{std::move(rule_mapping)}}});
+ }
+ return anon_defs;
+}
+
+[[nodiscard]] auto ReadImplicitObject(EntityName const& id,
+ nlohmann::json const& json,
+ AsyncMapConsumerLoggerPtr const& logger)
+ -> std::optional<UserRule::implicit_t> {
+ auto map = GetOrDefault(json, "implicit", nlohmann::json::object());
+ if (not map.is_object()) {
+ (*logger)(
+ fmt::format("Field implicit in rule {} is not an object", id.name),
+ true);
+ return std::nullopt;
+ }
+
+ auto implicit_targets = UserRule::implicit_t{};
+ implicit_targets.reserve(map.size());
+
+ for (auto const& [key, val] : map.items()) {
+ if (not val.is_array()) {
+ (*logger)(fmt::format("Entry in implicit field of rule {} is not a "
+ "list.",
+ id.name),
+ true);
+ return std::nullopt;
+ }
+ auto targets = typename UserRule::implicit_t::mapped_type{};
+ targets.reserve(val.size());
+ for (auto const& item : val) {
+ auto expr_id = ParseEntityNameFromJson(
+ item, id, [&logger, &item, &id](std::string const& parse_err) {
+ (*logger)(fmt::format("Parsing entry {} in implicit field "
+ "of rule {} failed with:\n{}",
+ item.dump(),
+ id.name,
+ parse_err),
+ true);
+ });
+ if (not expr_id) {
+ return std::nullopt;
+ }
+ targets.emplace_back(*expr_id);
+ }
+ implicit_targets.emplace(key, targets);
+ }
+ return implicit_targets;
+}
+
+[[nodiscard]] auto ReadConfigTransitionsObject(
+ EntityName const& id,
+ nlohmann::json const& json,
+ std::vector<std::string> const& config_vars,
+ ExpressionFunction::imports_t const& imports,
+ AsyncMapConsumerLoggerPtr const& logger)
+ -> std::optional<UserRule::config_trans_t> {
+ auto map =
+ GetOrDefault(json, "config_transitions", nlohmann::json::object());
+ if (not map.is_object()) {
+ (*logger)(
+ fmt::format("Field config_transitions in rule {} is not an object",
+ id.name),
+ true);
+ return std::nullopt;
+ }
+
+ auto config_transitions = UserRule::config_trans_t{};
+ config_transitions.reserve(map.size());
+
+ for (auto const& [key, val] : map.items()) {
+ auto expr = Expression::FromJson(val);
+ if (not expr) {
+ (*logger)(fmt::format("Failed to create expression for entry {} in "
+ "config_transitions list of rule {}.",
+ key,
+ id.name),
+ true);
+ return std::nullopt;
+ }
+ config_transitions.emplace(
+ key,
+ std::make_shared<ExpressionFunction>(config_vars, imports, expr));
+ }
+ return config_transitions;
+}
+
+} // namespace
+
+auto CreateRuleMap(gsl::not_null<RuleFileMap*> const& rule_file_map,
+ gsl::not_null<ExpressionFunctionMap*> const& expr_map,
+ std::size_t jobs) -> UserRuleMap {
+ auto user_rule_creator = [rule_file_map, expr_map](auto ts,
+ auto setter,
+ auto logger,
+ auto /*subcaller*/,
+ auto const& id) {
+ if (not id.IsDefinitionName()) {
+ (*logger)(fmt::format("{} cannot name a rule", id.ToString()),
+ true);
+ return;
+ }
+ rule_file_map->ConsumeAfterKeysReady(
+ ts,
+ {id.ToModule()},
+ [ts, expr_map, setter = std::move(setter), logger, id](
+ auto json_values) {
+ auto rule_it = json_values[0]->find(id.name);
+ if (rule_it == json_values[0]->end()) {
+ (*logger)(
+ fmt::format(
+ "Cannot find rule {} in {}", id.name, id.module),
+ true);
+ return;
+ }
+
+ auto reader =
+ FieldReader::Create(rule_it.value(), id, "rule", logger);
+ if (not reader) {
+ return;
+ }
+ reader->ExpectFields(rule_fields);
+
+ auto expr = reader->ReadExpression("expression");
+ if (not expr) {
+ return;
+ }
+
+ auto target_fields = reader->ReadStringList("target_fields");
+ if (not target_fields) {
+ return;
+ }
+
+ auto string_fields = reader->ReadStringList("string_fields");
+ if (not string_fields) {
+ return;
+ }
+
+ auto config_fields = reader->ReadStringList("config_fields");
+ if (not config_fields) {
+ return;
+ }
+
+ auto implicit_targets =
+ ReadImplicitObject(id, rule_it.value(), logger);
+ if (not implicit_targets) {
+ return;
+ }
+
+ auto anonymous_defs =
+ ReadAnonymousObject(id, rule_it.value(), logger);
+ if (not anonymous_defs) {
+ return;
+ }
+
+ auto config_vars = reader->ReadStringList("config_vars");
+ if (not config_vars) {
+ return;
+ }
+
+ auto tainted = reader->ReadStringList("tainted");
+ if (not tainted) {
+ return;
+ }
+
+ auto import_aliases =
+ reader->ReadEntityAliasesObject("imports");
+ if (not import_aliases) {
+ return;
+ }
+ auto [names, ids] = std::move(*import_aliases).Obtain();
+
+ expr_map->ConsumeAfterKeysReady(
+ ts,
+ std::move(ids),
+ [ts,
+ id,
+ json = rule_it.value(),
+ expr = std::move(expr),
+ target_fields = std::move(*target_fields),
+ string_fields = std::move(*string_fields),
+ config_fields = std::move(*config_fields),
+ implicit_targets = std::move(*implicit_targets),
+ anonymous_defs = std::move(*anonymous_defs),
+ config_vars = std::move(*config_vars),
+ tainted = std::move(*tainted),
+ names = std::move(names),
+ setter = std::move(setter),
+ logger](auto expr_funcs) {
+ auto imports = ExpressionFunction::imports_t{};
+ imports.reserve(expr_funcs.size());
+ for (std::size_t i{}; i < expr_funcs.size(); ++i) {
+ imports.emplace(names[i], *expr_funcs[i]);
+ }
+
+ auto config_transitions = ReadConfigTransitionsObject(
+ id, json, config_vars, imports, logger);
+ if (not config_transitions) {
+ return;
+ }
+
+ auto rule = UserRule::Create(
+ target_fields,
+ string_fields,
+ config_fields,
+ implicit_targets,
+ anonymous_defs,
+ config_vars,
+ tainted,
+ std::move(*config_transitions),
+ std::make_shared<ExpressionFunction>(
+ std::move(config_vars),
+ std::move(imports),
+ std::move(expr)),
+ [&logger](auto const& msg) {
+ (*logger)(msg, true);
+ });
+ if (rule) {
+ (*setter)(std::move(rule));
+ }
+ },
+ [logger, id](auto msg, auto fatal) {
+ (*logger)(fmt::format("While reading expression map "
+ "for rule {} in {}: {}",
+ id.name,
+ id.module,
+ msg),
+ fatal);
+ });
+ },
+ [logger, id](auto msg, auto fatal) {
+ (*logger)(
+ fmt::format(
+ "While reading rule file in {}: {}", id.module, msg),
+ fatal);
+ });
+ };
+ return UserRuleMap{user_rule_creator, jobs};
+}
+
+} // namespace BuildMaps::Base
diff --git a/src/buildtool/build_engine/base_maps/rule_map.hpp b/src/buildtool/build_engine/base_maps/rule_map.hpp
new file mode 100644
index 00000000..1547e720
--- /dev/null
+++ b/src/buildtool/build_engine/base_maps/rule_map.hpp
@@ -0,0 +1,33 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_RULE_MAP_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_RULE_MAP_HPP
+
+#include <memory>
+#include <string>
+
+#include "nlohmann/json.hpp"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/build_engine/base_maps/entity_name.hpp"
+#include "src/buildtool/build_engine/base_maps/expression_map.hpp"
+#include "src/buildtool/build_engine/base_maps/json_file_map.hpp"
+#include "src/buildtool/build_engine/base_maps/module_name.hpp"
+#include "src/buildtool/build_engine/base_maps/user_rule.hpp"
+#include "src/buildtool/multithreading/async_map_consumer.hpp"
+
+namespace BuildMaps::Base {
+
+using RuleFileMap = AsyncMapConsumer<ModuleName, nlohmann::json>;
+
+constexpr auto CreateRuleFileMap =
+ CreateJsonFileMap<&RepositoryConfig::RuleRoot,
+ &RepositoryConfig::RuleFileName,
+ /*kMandatory=*/true>;
+
+using UserRuleMap = AsyncMapConsumer<EntityName, UserRulePtr>;
+
+auto CreateRuleMap(gsl::not_null<RuleFileMap*> const& rule_file_map,
+ gsl::not_null<ExpressionFunctionMap*> const& expr_map,
+ std::size_t jobs = 0) -> UserRuleMap;
+
+} // namespace BuildMaps::Base
+
+#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_RULE_MAP_HPP
diff --git a/src/buildtool/build_engine/base_maps/source_map.cpp b/src/buildtool/build_engine/base_maps/source_map.cpp
new file mode 100644
index 00000000..5c2a79d0
--- /dev/null
+++ b/src/buildtool/build_engine/base_maps/source_map.cpp
@@ -0,0 +1,88 @@
+#include "src/buildtool/build_engine/base_maps/source_map.hpp"
+
+#include <filesystem>
+
+#include "src/buildtool/common/artifact_digest.hpp"
+#include "src/buildtool/file_system/object_type.hpp"
+#include "src/buildtool/multithreading/async_map_consumer.hpp"
+#include "src/utils/cpp/json.hpp"
+
+namespace BuildMaps::Base {
+
+namespace {
+
+auto as_target(const BuildMaps::Base::EntityName& key, ExpressionPtr artifact)
+ -> AnalysedTargetPtr {
+ auto stage =
+ ExpressionPtr{Expression::map_t{key.name, std::move(artifact)}};
+ return std::make_shared<AnalysedTarget>(
+ TargetResult{stage, Expression::kEmptyMap, stage},
+ std::vector<ActionDescription>{},
+ std::vector<std::string>{},
+ std::vector<Tree>{},
+ std::unordered_set<std::string>{},
+ std::set<std::string>{});
+}
+
+} // namespace
+
+auto CreateSourceTargetMap(const gsl::not_null<DirectoryEntriesMap*>& dirs,
+ std::size_t jobs) -> SourceTargetMap {
+ auto src_target_reader = [dirs](auto ts,
+ auto setter,
+ auto logger,
+ auto /* unused */,
+ auto const& key) {
+ using std::filesystem::path;
+ auto name = path(key.name).lexically_normal();
+ if (name.is_absolute() or *name.begin() == "..") {
+ (*logger)(
+ fmt::format("Source file reference outside current module: {}",
+ key.name),
+ true);
+ return;
+ }
+ auto dir = (path(key.module) / name).parent_path();
+ auto const* ws_root =
+ RepositoryConfig::Instance().WorkspaceRoot(key.repository);
+
+ auto src_file_reader = [ts, key, name, setter, logger, dir, ws_root](
+ bool exists_in_ws_root) {
+ if (ws_root != nullptr and exists_in_ws_root) {
+ if (auto desc = ws_root->ToArtifactDescription(
+ path(key.module) / name, key.repository)) {
+ (*setter)(as_target(key, ExpressionPtr{std::move(*desc)}));
+ return;
+ }
+ }
+ (*logger)(fmt::format("Cannot determine source file {}",
+ path(key.name).filename().string()),
+ true);
+ };
+
+ if (ws_root != nullptr and ws_root->HasFastDirectoryLookup()) {
+ // by-pass directory map and directly attempt to read from ws_root
+ src_file_reader(ws_root->IsFile(path(key.module) / name));
+ return;
+ }
+ dirs->ConsumeAfterKeysReady(
+ ts,
+ {ModuleName{key.repository, dir.string()}},
+ [key, src_file_reader](auto values) {
+ src_file_reader(
+ values[0]->Contains(path(key.name).filename().string()));
+ },
+ [logger, dir](auto msg, auto fatal) {
+ (*logger)(
+ fmt::format(
+ "While reading contents of {}: {}", dir.string(), msg),
+ fatal);
+ }
+
+ );
+ };
+ return AsyncMapConsumer<EntityName, AnalysedTargetPtr>(src_target_reader,
+ jobs);
+}
+
+}; // namespace BuildMaps::Base
diff --git a/src/buildtool/build_engine/base_maps/source_map.hpp b/src/buildtool/build_engine/base_maps/source_map.hpp
new file mode 100644
index 00000000..a8e9fd9b
--- /dev/null
+++ b/src/buildtool/build_engine/base_maps/source_map.hpp
@@ -0,0 +1,24 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_SOURCE_MAP_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_SOURCE_MAP_HPP
+
+#include <unordered_set>
+
+#include "nlohmann/json.hpp"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/build_engine/analysed_target/analysed_target.hpp"
+#include "src/buildtool/build_engine/base_maps/directory_map.hpp"
+#include "src/buildtool/build_engine/base_maps/entity_name.hpp"
+#include "src/buildtool/build_engine/expression/expression.hpp"
+#include "src/buildtool/multithreading/async_map_consumer.hpp"
+#include "src/buildtool/multithreading/task_system.hpp"
+
+namespace BuildMaps::Base {
+
+using SourceTargetMap = AsyncMapConsumer<EntityName, AnalysedTargetPtr>;
+
+auto CreateSourceTargetMap(const gsl::not_null<DirectoryEntriesMap*>& dirs,
+ std::size_t jobs = 0) -> SourceTargetMap;
+
+} // namespace BuildMaps::Base
+
+#endif
diff --git a/src/buildtool/build_engine/base_maps/targets_file_map.hpp b/src/buildtool/build_engine/base_maps/targets_file_map.hpp
new file mode 100644
index 00000000..23db052e
--- /dev/null
+++ b/src/buildtool/build_engine/base_maps/targets_file_map.hpp
@@ -0,0 +1,23 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_TARGETS_FILE_MAP_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_TARGETS_FILE_MAP_HPP
+
+#include <filesystem>
+#include <string>
+
+#include "nlohmann/json.hpp"
+#include "src/buildtool/build_engine/base_maps/json_file_map.hpp"
+#include "src/buildtool/build_engine/base_maps/module_name.hpp"
+#include "src/buildtool/multithreading/async_map_consumer.hpp"
+
+namespace BuildMaps::Base {
+
+using TargetsFileMap = AsyncMapConsumer<ModuleName, nlohmann::json>;
+
+constexpr auto CreateTargetsFileMap =
+ CreateJsonFileMap<&RepositoryConfig::TargetRoot,
+ &RepositoryConfig::TargetFileName,
+ /*kMandatory=*/true>;
+
+} // namespace BuildMaps::Base
+
+#endif
diff --git a/src/buildtool/build_engine/base_maps/user_rule.hpp b/src/buildtool/build_engine/base_maps/user_rule.hpp
new file mode 100644
index 00000000..807e3478
--- /dev/null
+++ b/src/buildtool/build_engine/base_maps/user_rule.hpp
@@ -0,0 +1,404 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_USER_RULE_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_USER_RULE_HPP
+
+#include <algorithm>
+#include <memory>
+#include <set>
+#include <sstream>
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+#include <vector>
+
+#include "fmt/core.h"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/build_engine/base_maps/entity_name.hpp"
+#include "src/buildtool/build_engine/base_maps/expression_function.hpp"
+#include "src/buildtool/build_engine/expression/expression.hpp"
+#include "src/utils/cpp/concepts.hpp"
+
+namespace BuildMaps::Base {
+
+// Get duplicates from containers.
+// NOTE: Requires all input containers to be sorted!
+// kTriangular=true Performs triangular compare, everyone with everyone.
+// kTriangular=false Performs linear compare, first with each of the rest.
+template <bool kTriangular,
+ InputIterableContainer T_Container,
+ InputIterableContainer... T_Rest,
+ OutputIterableContainer T_Result =
+ std::unordered_set<typename T_Container::value_type>>
+[[nodiscard]] static inline auto GetDuplicates(T_Container const& first,
+ T_Rest const&... rest)
+ -> T_Result;
+
+template <InputIterableStringContainer T_Container>
+[[nodiscard]] static inline auto JoinContainer(T_Container const& c,
+ std::string const& sep)
+ -> std::string;
+
+class UserRule {
+ public:
+ using Ptr = std::shared_ptr<UserRule>;
+ using implicit_t = std::unordered_map<std::string, std::vector<EntityName>>;
+ using implicit_exp_t = std::unordered_map<std::string, ExpressionPtr>;
+ using config_trans_t =
+ std::unordered_map<std::string, ExpressionFunctionPtr>;
+
+ struct AnonymousDefinition {
+ std::string target;
+ std::string provider;
+ ExpressionPtr rule_map;
+ };
+ using anonymous_defs_t =
+ std::unordered_map<std::string, AnonymousDefinition>;
+
+ [[nodiscard]] static auto Create(
+ std::vector<std::string> target_fields,
+ std::vector<std::string> string_fields,
+ std::vector<std::string> config_fields,
+ implicit_t const& implicit_targets,
+ anonymous_defs_t anonymous_defs,
+ std::vector<std::string> const& config_vars,
+ std::vector<std::string> const& tainted,
+ config_trans_t config_transitions,
+ ExpressionFunctionPtr const& expr,
+ std::function<void(std::string const&)> const& logger) -> Ptr {
+
+ auto implicit_fields = std::vector<std::string>{};
+ implicit_fields.reserve(implicit_targets.size());
+ std::transform(implicit_targets.begin(),
+ implicit_targets.end(),
+ std::back_inserter(implicit_fields),
+ [](auto const& el) { return el.first; });
+ std::sort(implicit_fields.begin(), implicit_fields.end());
+
+ auto anonymous_fields = std::vector<std::string>{};
+ anonymous_fields.reserve(anonymous_defs.size());
+ std::transform(anonymous_defs.begin(),
+ anonymous_defs.end(),
+ std::back_inserter(anonymous_fields),
+ [](auto const& el) { return el.first; });
+ std::sort(anonymous_fields.begin(), anonymous_fields.end());
+
+ std::sort(target_fields.begin(), target_fields.end());
+ std::sort(string_fields.begin(), string_fields.end());
+ std::sort(config_fields.begin(), config_fields.end());
+
+ auto dups = GetDuplicates</*kTriangular=*/false>(kReservedKeywords,
+ target_fields,
+ string_fields,
+ config_fields,
+ implicit_fields,
+ anonymous_fields);
+ if (not dups.empty()) {
+ logger(
+ fmt::format("User-defined fields cannot be any of the reserved "
+ "fields [{}]",
+ JoinContainer(kReservedKeywords, ",")));
+ return nullptr;
+ }
+
+ dups = GetDuplicates</*kTriangular=*/true>(target_fields,
+ string_fields,
+ config_fields,
+ implicit_fields,
+ anonymous_fields);
+
+ if (not dups.empty()) {
+ logger(
+ fmt::format("A field can have only one type, but the following "
+ "have more: [{}]",
+ JoinContainer(dups, ",")));
+ return nullptr;
+ }
+
+ auto transition_targets = std::vector<std::string>{};
+ transition_targets.reserve(config_transitions.size());
+ std::transform(config_transitions.begin(),
+ config_transitions.end(),
+ std::back_inserter(transition_targets),
+ [](auto const& el) { return el.first; });
+ std::sort(transition_targets.begin(), transition_targets.end());
+
+ dups = GetDuplicates</*kTriangular=*/false>(transition_targets,
+ target_fields,
+ implicit_fields,
+ anonymous_fields);
+ if (dups != decltype(dups){transition_targets.begin(),
+ transition_targets.end()}) {
+ logger(
+ fmt::format("Config transitions has to be a map from target "
+ "fields to transition expressions, but found [{}]",
+ JoinContainer(transition_targets, ",")));
+ return nullptr;
+ }
+
+ auto const setter = [&config_transitions](auto const field) {
+ config_transitions.emplace(
+ field,
+ ExpressionFunction::kEmptyTransition); // wont overwrite
+ };
+ config_transitions.reserve(target_fields.size() +
+ implicit_fields.size() +
+ anonymous_fields.size());
+ std::for_each(target_fields.begin(), target_fields.end(), setter);
+ std::for_each(implicit_fields.begin(), implicit_fields.end(), setter);
+ std::for_each(anonymous_fields.begin(), anonymous_fields.end(), setter);
+
+ implicit_exp_t implicit_target_exp;
+ implicit_target_exp.reserve(implicit_targets.size());
+ for (auto const& [target_name, target_entity_vec] : implicit_targets) {
+ std::vector<ExpressionPtr> target_exps;
+ target_exps.reserve(target_entity_vec.size());
+ for (auto const& target_entity : target_entity_vec) {
+ target_exps.emplace_back(ExpressionPtr{target_entity});
+ }
+ implicit_target_exp.emplace(target_name, target_exps);
+ }
+
+ return std::make_shared<UserRule>(
+ std::move(target_fields),
+ std::move(string_fields),
+ std::move(config_fields),
+ implicit_targets,
+ std::move(implicit_target_exp),
+ std::move(anonymous_defs),
+ config_vars,
+ std::set<std::string>{tainted.begin(), tainted.end()},
+ std::move(config_transitions),
+ expr);
+ }
+
+ UserRule(std::vector<std::string> target_fields,
+ std::vector<std::string> string_fields,
+ std::vector<std::string> config_fields,
+ implicit_t implicit_targets,
+ implicit_exp_t implicit_target_exp,
+ anonymous_defs_t anonymous_defs,
+ std::vector<std::string> config_vars,
+ std::set<std::string> tainted,
+ config_trans_t config_transitions,
+ ExpressionFunctionPtr expr) noexcept
+ : target_fields_{std::move(target_fields)},
+ string_fields_{std::move(string_fields)},
+ config_fields_{std::move(config_fields)},
+ implicit_targets_{std::move(implicit_targets)},
+ implicit_target_exp_{std::move(implicit_target_exp)},
+ anonymous_defs_{std::move(anonymous_defs)},
+ config_vars_{std::move(config_vars)},
+ tainted_{std::move(tainted)},
+ config_transitions_{std::move(config_transitions)},
+ expr_{std::move(expr)} {}
+
+ [[nodiscard]] auto TargetFields() const& noexcept
+ -> std::vector<std::string> const& {
+ return target_fields_;
+ }
+
+ [[nodiscard]] auto TargetFields() && noexcept -> std::vector<std::string> {
+ return std::move(target_fields_);
+ }
+
+ [[nodiscard]] auto StringFields() const& noexcept
+ -> std::vector<std::string> const& {
+ return string_fields_;
+ }
+
+ [[nodiscard]] auto StringFields() && noexcept -> std::vector<std::string> {
+ return std::move(string_fields_);
+ }
+
+ [[nodiscard]] auto ConfigFields() const& noexcept
+ -> std::vector<std::string> const& {
+ return config_fields_;
+ }
+
+ [[nodiscard]] auto ConfigFields() && noexcept -> std::vector<std::string> {
+ return std::move(config_fields_);
+ }
+
+ [[nodiscard]] auto ImplicitTargets() const& noexcept -> implicit_t const& {
+ return implicit_targets_;
+ }
+
+ [[nodiscard]] auto ImplicitTargets() && noexcept -> implicit_t {
+ return std::move(implicit_targets_);
+ }
+
+ [[nodiscard]] auto ImplicitTargetExps() const& noexcept
+ -> implicit_exp_t const& {
+ return implicit_target_exp_;
+ }
+
+ [[nodiscard]] auto ExpectedFields() const& noexcept
+ -> std::unordered_set<std::string> const& {
+ return expected_entries_;
+ }
+
+ [[nodiscard]] auto ConfigVars() const& noexcept
+ -> std::vector<std::string> const& {
+ return config_vars_;
+ }
+
+ [[nodiscard]] auto ConfigVars() && noexcept -> std::vector<std::string> {
+ return std::move(config_vars_);
+ }
+
+ [[nodiscard]] auto Tainted() const& noexcept
+ -> std::set<std::string> const& {
+ return tainted_;
+ }
+
+ [[nodiscard]] auto Tainted() && noexcept -> std::set<std::string> {
+ return std::move(tainted_);
+ }
+
+ [[nodiscard]] auto ConfigTransitions() const& noexcept
+ -> config_trans_t const& {
+ return config_transitions_;
+ }
+
+ [[nodiscard]] auto ConfigTransitions() && noexcept -> config_trans_t {
+ return std::move(config_transitions_);
+ }
+
+ [[nodiscard]] auto Expression() const& noexcept
+ -> ExpressionFunctionPtr const& {
+ return expr_;
+ }
+
+ [[nodiscard]] auto Expression() && noexcept -> ExpressionFunctionPtr {
+ return std::move(expr_);
+ }
+
+ [[nodiscard]] auto AnonymousDefinitions() const& noexcept
+ -> anonymous_defs_t {
+ return anonymous_defs_;
+ }
+
+ [[nodiscard]] auto AnonymousDefinitions() && noexcept -> anonymous_defs_t {
+ return std::move(anonymous_defs_);
+ }
+
+ private:
+ // NOTE: Must be sorted
+ static inline std::vector<std::string> const kReservedKeywords{
+ "arguments_config",
+ "tainted",
+ "type"};
+
+ static auto ComputeExpectedEntries(std::vector<std::string> tfields,
+ std::vector<std::string> sfields,
+ std::vector<std::string> cfields)
+ -> std::unordered_set<std::string> {
+ size_t n = 0;
+ n += tfields.size();
+ n += sfields.size();
+ n += cfields.size();
+ n += kReservedKeywords.size();
+ std::unordered_set<std::string> expected_entries{};
+ expected_entries.reserve(n);
+ expected_entries.insert(tfields.begin(), tfields.end());
+ expected_entries.insert(sfields.begin(), sfields.end());
+ expected_entries.insert(cfields.begin(), cfields.end());
+ expected_entries.insert(kReservedKeywords.begin(),
+ kReservedKeywords.end());
+ return expected_entries;
+ }
+
+ std::vector<std::string> target_fields_{};
+ std::vector<std::string> string_fields_{};
+ std::vector<std::string> config_fields_{};
+ implicit_t implicit_targets_{};
+ implicit_exp_t implicit_target_exp_{};
+ anonymous_defs_t anonymous_defs_{};
+ std::vector<std::string> config_vars_{};
+ std::set<std::string> tainted_{};
+ config_trans_t config_transitions_{};
+ ExpressionFunctionPtr expr_{};
+ std::unordered_set<std::string> expected_entries_{
+ ComputeExpectedEntries(target_fields_, string_fields_, config_fields_)};
+};
+
+using UserRulePtr = UserRule::Ptr;
+
+namespace detail {
+
+template <HasSize T_Container, HasSize... T_Rest>
+[[nodiscard]] static inline auto MaxSize(T_Container const& first,
+ T_Rest const&... rest) -> std::size_t {
+ if constexpr (sizeof...(rest) > 0) {
+ return std::max(first.size(), MaxSize(rest...));
+ }
+ return first.size();
+}
+
+template <bool kTriangular,
+ OutputIterableContainer T_Result,
+ InputIterableContainer T_First,
+ InputIterableContainer T_Second,
+ InputIterableContainer... T_Rest>
+static auto inline FindDuplicates(gsl::not_null<T_Result*> const& dups,
+ T_First const& first,
+ T_Second const& second,
+ T_Rest const&... rest) -> void {
+ gsl_ExpectsAudit(std::is_sorted(first.begin(), first.end()) and
+ std::is_sorted(second.begin(), second.end()));
+ std::set_intersection(first.begin(),
+ first.end(),
+ second.begin(),
+ second.end(),
+ std::inserter(*dups, dups->begin()));
+ if constexpr (sizeof...(rest) > 0) {
+ // n comparisons with rest: first<->rest[0], ..., first<->rest[n]
+ FindDuplicates</*kTriangular=*/false>(dups, first, rest...);
+ if constexpr (kTriangular) {
+ // do triangular compare of second with rest
+ FindDuplicates</*kTriangular=*/true>(dups, second, rest...);
+ }
+ }
+}
+
+} // namespace detail
+
+template <bool kTriangular,
+ InputIterableContainer T_Container,
+ InputIterableContainer... T_Rest,
+ OutputIterableContainer T_Result>
+[[nodiscard]] static inline auto GetDuplicates(T_Container const& first,
+ T_Rest const&... rest)
+ -> T_Result {
+ auto dups = T_Result{};
+ constexpr auto kNumContainers = 1 + sizeof...(rest);
+ if constexpr (kNumContainers > 1) {
+ std::size_t size{};
+ if constexpr (kTriangular) {
+ // worst case if all containers are of the same size
+ size = kNumContainers * detail::MaxSize(first, rest...) / 2;
+ }
+ else {
+ size = std::min(first.size(), detail::MaxSize(rest...));
+ }
+ dups.reserve(size);
+ detail::FindDuplicates<kTriangular, T_Result>(&dups, first, rest...);
+ }
+ return dups;
+}
+
+template <InputIterableStringContainer T_Container>
+[[nodiscard]] static inline auto JoinContainer(T_Container const& c,
+ std::string const& sep)
+ -> std::string {
+ std::ostringstream oss{};
+ std::size_t insert_sep{};
+ for (auto const& i : c) {
+ oss << (insert_sep++ ? sep.c_str() : "");
+ oss << i;
+ }
+ return oss.str();
+};
+
+} // namespace BuildMaps::Base
+
+#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_USER_RULE_HPP
diff --git a/src/buildtool/build_engine/expression/TARGETS b/src/buildtool/build_engine/expression/TARGETS
new file mode 100644
index 00000000..4f719185
--- /dev/null
+++ b/src/buildtool/build_engine/expression/TARGETS
@@ -0,0 +1,46 @@
+{ "linked_map":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["linked_map"]
+ , "hdrs": ["linked_map.hpp"]
+ , "deps":
+ [ ["@", "fmt", "", "fmt"]
+ , ["src/utils/cpp", "hash_combine"]
+ , ["src/utils/cpp", "atomic"]
+ ]
+ , "stage": ["src", "buildtool", "build_engine", "expression"]
+ }
+, "expression":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["expression"]
+ , "hdrs":
+ [ "configuration.hpp"
+ , "expression_ptr.hpp"
+ , "expression.hpp"
+ , "function_map.hpp"
+ , "evaluator.hpp"
+ , "target_result.hpp"
+ , "target_node.hpp"
+ ]
+ , "srcs":
+ [ "expression_ptr.cpp"
+ , "expression.cpp"
+ , "evaluator.cpp"
+ , "target_node.cpp"
+ ]
+ , "deps":
+ [ "linked_map"
+ , ["src/buildtool/build_engine/base_maps", "entity_name_data"]
+ , ["src/buildtool/common", "artifact_description"]
+ , ["src/buildtool/crypto", "hash_generator"]
+ , ["src/buildtool/logging", "logging"]
+ , ["src/utils/cpp", "type_safe_arithmetic"]
+ , ["src/utils/cpp", "json"]
+ , ["src/utils/cpp", "hash_combine"]
+ , ["src/utils/cpp", "hex_string"]
+ , ["src/utils/cpp", "concepts"]
+ , ["src/utils/cpp", "atomic"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ ]
+ , "stage": ["src", "buildtool", "build_engine", "expression"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/build_engine/expression/configuration.hpp b/src/buildtool/build_engine/expression/configuration.hpp
new file mode 100644
index 00000000..5d0b9c2a
--- /dev/null
+++ b/src/buildtool/build_engine/expression/configuration.hpp
@@ -0,0 +1,154 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_CONFIGURATION_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_CONFIGURATION_HPP
+
+#include <algorithm>
+#include <sstream>
+#include <string>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/build_engine/expression/expression.hpp"
+#include "src/utils/cpp/concepts.hpp"
+
+// Decorator for Expression containing a map. Adds Prune() and Update().
+class Configuration {
+ public:
+ explicit Configuration(ExpressionPtr expr) noexcept
+ : expr_{std::move(expr)} {
+ gsl_ExpectsAudit(expr_->IsMap());
+ }
+ explicit Configuration(Expression::map_t const& map) noexcept
+ : expr_{ExpressionPtr{map}} {}
+
+ Configuration() noexcept = default;
+ ~Configuration() noexcept = default;
+ Configuration(Configuration const&) noexcept = default;
+ Configuration(Configuration&&) noexcept = default;
+ auto operator=(Configuration const&) noexcept -> Configuration& = default;
+ auto operator=(Configuration&&) noexcept -> Configuration& = default;
+
+ [[nodiscard]] auto operator[](std::string const& key) const
+ -> ExpressionPtr {
+ return expr_->Get(key, Expression::none_t{});
+ }
+ [[nodiscard]] auto operator[](ExpressionPtr const& key) const
+ -> ExpressionPtr {
+ return expr_->Get(key->String(), Expression::none_t{});
+ }
+ [[nodiscard]] auto ToString() const -> std::string {
+ return expr_->ToString();
+ }
+ [[nodiscard]] auto ToJson() const -> nlohmann::json {
+ return expr_->ToJson();
+ }
+ [[nodiscard]] auto Enumerate(const std::string& prefix, size_t width) const
+ -> std::string {
+ std::stringstream ss{};
+ if (width > prefix.size()) {
+ size_t actual_width = width - prefix.size();
+ for (auto const& [key, value] : expr_->Map()) {
+ std::string key_str = Expression{key}.ToString();
+ std::string val_str = value->ToString();
+ if (actual_width > key_str.size() + 3) {
+ ss << prefix << key_str << " : ";
+ size_t remain = actual_width - key_str.size() - 3;
+ if (val_str.size() >= remain) {
+ ss << val_str.substr(0, remain - 3) << "...";
+ }
+ else {
+ ss << val_str;
+ }
+ }
+ else {
+ ss << prefix << key_str.substr(0, actual_width);
+ }
+ ss << std::endl;
+ }
+ }
+ return ss.str();
+ }
+
+ [[nodiscard]] auto operator==(const Configuration& other) const -> bool {
+ return expr_ == other.expr_;
+ }
+
+ [[nodiscard]] auto hash() const noexcept -> std::size_t {
+ return std::hash<ExpressionPtr>{}(expr_);
+ }
+
+ template <InputIterableStringContainer T>
+ [[nodiscard]] auto Prune(T const& vars) const -> Configuration {
+ auto subset = Expression::map_t::underlying_map_t{};
+ std::for_each(vars.begin(), vars.end(), [&](auto const& k) {
+ auto const& map = expr_->Map();
+ auto v = map.Find(k);
+ if (v) {
+ subset.emplace(k, v->get());
+ }
+ else {
+ subset.emplace(k, Expression::kNone);
+ }
+ });
+ return Configuration{Expression::map_t{subset}};
+ }
+
+ [[nodiscard]] auto Prune(ExpressionPtr const& vars) const -> Configuration {
+ auto subset = Expression::map_t::underlying_map_t{};
+ auto const& list = vars->List();
+ std::for_each(list.begin(), list.end(), [&](auto const& k) {
+ auto const& map = expr_->Map();
+ auto const key = k->String();
+ auto v = map.Find(key);
+ if (v) {
+ subset.emplace(key, v->get());
+ }
+ else {
+ subset.emplace(key, ExpressionPtr{Expression::none_t{}});
+ }
+ });
+ return Configuration{Expression::map_t{subset}};
+ }
+
+ template <class T>
+ requires(Expression::IsValidType<T>() or std::is_same_v<T, ExpressionPtr>)
+ [[nodiscard]] auto Update(std::string const& name, T const& value) const
+ -> Configuration {
+ auto update = Expression::map_t::underlying_map_t{};
+ update.emplace(name, value);
+ return Configuration{Expression::map_t{expr_, update}};
+ }
+
+ [[nodiscard]] auto Update(
+ Expression::map_t::underlying_map_t const& map) const -> Configuration {
+ if (map.empty()) {
+ return *this;
+ }
+ return Configuration{Expression::map_t{expr_, map}};
+ }
+
+ [[nodiscard]] auto Update(ExpressionPtr const& map) const -> Configuration {
+ gsl_ExpectsAudit(map->IsMap());
+ if (map->Map().empty()) {
+ return *this;
+ }
+ return Configuration{Expression::map_t{expr_, map}};
+ }
+
+ [[nodiscard]] auto VariableFixed(std::string const& x) const -> bool {
+ return expr_->Map().Find(x).has_value();
+ }
+
+ private:
+ ExpressionPtr expr_{Expression::kEmptyMap};
+};
+
+namespace std {
+template <>
+struct hash<Configuration> {
+ [[nodiscard]] auto operator()(Configuration const& p) const noexcept
+ -> std::size_t {
+ return p.hash();
+ }
+};
+} // namespace std
+
+#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_CONFIGURATION_HPP
diff --git a/src/buildtool/build_engine/expression/evaluator.cpp b/src/buildtool/build_engine/expression/evaluator.cpp
new file mode 100644
index 00000000..b93fa9f9
--- /dev/null
+++ b/src/buildtool/build_engine/expression/evaluator.cpp
@@ -0,0 +1,936 @@
+#include "src/buildtool/build_engine/expression/evaluator.hpp"
+
+#include <algorithm>
+#include <exception>
+#include <filesystem>
+#include <sstream>
+#include <string>
+#include <unordered_set>
+
+#include "fmt/core.h"
+#include "src/buildtool/build_engine/expression/configuration.hpp"
+#include "src/buildtool/build_engine/expression/function_map.hpp"
+
+namespace {
+
+using namespace std::string_literals;
+using number_t = Expression::number_t;
+using list_t = Expression::list_t;
+using map_t = Expression::map_t;
+
+auto ValueIsTrue(ExpressionPtr const& val) -> bool {
+ if (val->IsNone()) {
+ return false;
+ }
+ if (val->IsBool()) {
+ return *val != false;
+ }
+ if (val->IsNumber()) {
+ return *val != number_t{0};
+ }
+ if (val->IsString()) {
+ return *val != ""s and *val != "0"s and *val != "NO"s;
+ }
+ if (val->IsList()) {
+ return not val->List().empty();
+ }
+ if (val->IsMap()) {
+ return not val->Map().empty();
+ }
+ return true;
+}
+
+auto Flatten(ExpressionPtr const& expr) -> ExpressionPtr {
+ if (not expr->IsList()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "Flatten expects list but instead got: {}.", expr->ToString())};
+ }
+ if (expr->List().empty()) {
+ return expr;
+ }
+ auto const& list = expr->List();
+ size_t size{};
+ std::for_each(list.begin(), list.end(), [&](auto const& l) {
+ if (not l->IsList()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("Non-list entry found for argument in flatten: {}.",
+ l->ToString())};
+ }
+ size += l->List().size();
+ });
+ auto result = Expression::list_t{};
+ result.reserve(size);
+ std::for_each(list.begin(), list.end(), [&](auto const& l) {
+ std::copy(
+ l->List().begin(), l->List().end(), std::back_inserter(result));
+ });
+ return ExpressionPtr{result};
+}
+
+auto All(ExpressionPtr const& list) -> ExpressionPtr {
+ for (auto const& c : list->List()) {
+ if (not ValueIsTrue(c)) {
+ return ExpressionPtr{false};
+ }
+ }
+ return ExpressionPtr{true};
+}
+
+auto Any(ExpressionPtr const& list) -> ExpressionPtr {
+ for (auto const& c : list->List()) {
+ if (ValueIsTrue(c)) {
+ return ExpressionPtr{true};
+ }
+ }
+ return ExpressionPtr{false};
+}
+
+// logical AND with short-circuit evaluation
+auto LogicalAnd(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ if (auto const list = expr->At("$1")) {
+ auto const& l = list->get();
+ if (not l->IsList()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("Non-list entry found for argument in and: {}.",
+ l->ToString())};
+ }
+ for (auto const& c : l->List()) {
+ if (not ValueIsTrue(eval(c, env))) {
+ return ExpressionPtr{false};
+ }
+ }
+ }
+ return ExpressionPtr{true};
+}
+
+// logical OR with short-circuit evaluation
+auto LogicalOr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ if (auto const list = expr->At("$1")) {
+ auto const& l = list->get();
+ if (not l->IsList()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "Non-list entry found for argument in or: {}.", l->ToString())};
+ }
+ for (auto const& c : l->List()) {
+ if (ValueIsTrue(eval(c, env))) {
+ return ExpressionPtr{true};
+ }
+ }
+ }
+ return ExpressionPtr{false};
+}
+
+auto Keys(ExpressionPtr const& d) -> ExpressionPtr {
+ auto const& m = d->Map();
+ auto result = Expression::list_t{};
+ result.reserve(m.size());
+ std::for_each(m.begin(), m.end(), [&](auto const& item) {
+ result.emplace_back(ExpressionPtr{item.first});
+ });
+ return ExpressionPtr{result};
+}
+
+auto Values(ExpressionPtr const& d) -> ExpressionPtr {
+ return ExpressionPtr{d->Map().Values()};
+}
+
+auto NubRight(ExpressionPtr const& expr) -> ExpressionPtr {
+ if (not expr->IsList()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "nub_right expects list but instead got: {}.", expr->ToString())};
+ }
+ if (expr->List().empty()) {
+ return expr;
+ }
+ auto const& list = expr->List();
+ auto reverse_result = Expression::list_t{};
+ reverse_result.reserve(list.size());
+ auto seen = std::unordered_set<ExpressionPtr>{};
+ seen.reserve(list.size());
+ std::for_each(list.rbegin(), list.rend(), [&](auto const& l) {
+ if (not seen.contains(l)) {
+ reverse_result.push_back(l);
+ seen.insert(l);
+ }
+ });
+ std::reverse(reverse_result.begin(), reverse_result.end());
+ return ExpressionPtr{reverse_result};
+}
+
+auto ChangeEndingTo(ExpressionPtr const& name, ExpressionPtr const& ending)
+ -> ExpressionPtr {
+ std::filesystem::path path{name->String()};
+ return ExpressionPtr{(path.parent_path() / path.stem()).string() +
+ ending->String()};
+}
+
+auto BaseName(ExpressionPtr const& name) -> ExpressionPtr {
+ std::filesystem::path path{name->String()};
+ return ExpressionPtr{path.filename().string()};
+}
+
+auto ShellQuote(std::string arg) -> std::string {
+ auto start_pos = size_t{};
+ std::string from{"'"};
+ std::string to{"'\\''"};
+ while ((start_pos = arg.find(from, start_pos)) != std::string::npos) {
+ arg.replace(start_pos, from.length(), to);
+ start_pos += to.length();
+ }
+ return fmt::format("'{}'", arg);
+}
+
+template <bool kDoQuote = false>
+auto Join(ExpressionPtr const& expr, std::string const& sep) -> ExpressionPtr {
+ if (expr->IsString()) {
+ auto string = expr->String();
+ if constexpr (kDoQuote) {
+ string = ShellQuote(std::move(string));
+ }
+ return ExpressionPtr{std::move(string)};
+ }
+ if (expr->IsList()) {
+ auto const& list = expr->List();
+ int insert_sep{};
+ std::stringstream ss{};
+ std::for_each(list.begin(), list.end(), [&](auto const& e) {
+ ss << (insert_sep++ ? sep : "");
+ auto string = e->String();
+ if constexpr (kDoQuote) {
+ string = ShellQuote(std::move(string));
+ }
+ ss << std::move(string);
+ });
+ return ExpressionPtr{ss.str()};
+ }
+ throw Evaluator::EvaluationError{fmt::format(
+ "Join expects string or list but got: {}.", expr->ToString())};
+}
+
+template <bool kDisjoint = false>
+auto Union(Expression::list_t const& dicts, size_t from, size_t to)
+ -> ExpressionPtr {
+ if (to <= from) {
+ return Expression::kEmptyMap;
+ }
+ if (to == from + 1) {
+ return dicts[from];
+ }
+ size_t mid = from + (to - from) / 2;
+ auto left = Union(dicts, from, mid);
+ auto right = Union(dicts, mid, to);
+ if (left->Map().empty()) {
+ return right;
+ }
+ if (right->Map().empty()) {
+ return left;
+ }
+ if constexpr (kDisjoint) {
+ auto dup = left->Map().FindConflictingDuplicate(right->Map());
+ if (dup) {
+ throw Evaluator::EvaluationError{
+ fmt::format("Map union not essentially disjoint as claimed, "
+ "duplicate key '{}'.",
+ dup->get())};
+ }
+ }
+ return ExpressionPtr{Expression::map_t{left, right}};
+}
+
+template <bool kDisjoint = false>
+auto Union(ExpressionPtr const& expr) -> ExpressionPtr {
+ if (not expr->IsList()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "Union expects list of maps but got: {}.", expr->ToString())};
+ }
+ auto const& list = expr->List();
+ if (list.empty()) {
+ return Expression::kEmptyMap;
+ }
+ return Union<kDisjoint>(list, 0, list.size());
+}
+
+auto ConcatTargetName(ExpressionPtr const& expr, ExpressionPtr const& append)
+ -> ExpressionPtr {
+ if (expr->IsString()) {
+ return ExpressionPtr{expr->String() + append->String()};
+ }
+ if (expr->IsList()) {
+ auto list = Expression::list_t{};
+ auto not_last = expr->List().size();
+ bool all_string = true;
+ std::for_each(
+ expr->List().begin(), expr->List().end(), [&](auto const& e) {
+ all_string = all_string and e->IsString();
+ if (all_string) {
+ list.emplace_back(ExpressionPtr{
+ e->String() + (--not_last ? "" : append->String())});
+ }
+ });
+ if (all_string) {
+ return ExpressionPtr{list};
+ }
+ }
+ throw Evaluator::EvaluationError{fmt::format(
+ "Unsupported expression for concat: {}.", expr->ToString())};
+}
+
+auto EvalArgument(ExpressionPtr const& expr,
+ std::string const& argument,
+ const SubExprEvaluator& eval,
+ Configuration const& env) -> ExpressionPtr {
+ try {
+ return eval(expr[argument], env);
+ } catch (Evaluator::EvaluationError const& ex) {
+ throw Evaluator::EvaluationError::WhileEval(
+ fmt::format("Evaluating argument {}:", argument), ex);
+ } catch (std::exception const& ex) {
+ throw Evaluator::EvaluationError::WhileEvaluating(
+ fmt::format("Evaluating argument {}:", argument), ex);
+ }
+}
+
+auto UnaryExpr(std::function<ExpressionPtr(ExpressionPtr const&)> const& f)
+ -> std::function<ExpressionPtr(SubExprEvaluator&&,
+ ExpressionPtr const&,
+ Configuration const&)> {
+ return [f](auto&& eval, auto const& expr, auto const& env) {
+ auto argument = EvalArgument(expr, "$1", eval, env);
+ try {
+ return f(argument);
+ } catch (Evaluator::EvaluationError const& ex) {
+ throw Evaluator::EvaluationError::WhileEval(
+ fmt::format("Having evaluted the argument to {}:",
+ argument->ToString()),
+ ex);
+ } catch (std::exception const& ex) {
+ throw Evaluator::EvaluationError::WhileEvaluating(
+ fmt::format("Having evaluted the argument to {}:",
+ argument->ToString()),
+ ex);
+ }
+ };
+}
+
+auto AndExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ if (auto const conds = expr->At("$1")) {
+ return conds->get()->IsList()
+ ? LogicalAnd(std::move(eval), expr, env)
+ : UnaryExpr(All)(std::move(eval), expr, env);
+ }
+ return ExpressionPtr{true};
+}
+
+auto OrExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ if (auto const conds = expr->At("$1")) {
+ return conds->get()->IsList()
+ ? LogicalOr(std::move(eval), expr, env)
+ : UnaryExpr(Any)(std::move(eval), expr, env);
+ }
+ return ExpressionPtr{false};
+}
+
+auto VarExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto result = env[expr["name"]];
+ if (result->IsNone()) {
+ return eval(expr->Get("default", Expression::none_t{}), env);
+ }
+ return result;
+}
+
+auto IfExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ if (ValueIsTrue(EvalArgument(expr, "cond", eval, env))) {
+ return EvalArgument(expr, "then", eval, env);
+ }
+ return eval(expr->Get("else", list_t{}), env);
+}
+
+auto CondExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto const& cond = expr->At("cond");
+ if (cond) {
+ if (not cond->get()->IsList()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "cond in cond has to be a list of pairs, but found {}",
+ cond->get()->ToString())};
+ }
+ for (const auto& pair : cond->get()->List()) {
+ if (not pair->IsList() or pair->List().size() != 2) {
+ throw Evaluator::EvaluationError{
+ fmt::format("cond in cond has to be a list of pairs, "
+ "but found entry {}",
+ pair->ToString())};
+ }
+ if (ValueIsTrue(eval(pair->List()[0], env))) {
+ return eval(pair->List()[1], env);
+ }
+ }
+ }
+ return eval(expr->Get("default", list_t{}), env);
+}
+
+auto CaseExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto const& cases = expr->At("case");
+ if (cases) {
+ if (not cases->get()->IsMap()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "case in case has to be a map of expressions, but found {}",
+ cases->get()->ToString())};
+ }
+ auto const& e = expr->At("expr");
+ if (not e) {
+ throw Evaluator::EvaluationError{"missing expr in case"};
+ }
+ auto const& key = eval(e->get(), env);
+ if (not key->IsString()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "expr in case must evaluate to string, but found {}",
+ key->ToString())};
+ }
+ if (auto const& val = cases->get()->At(key->String())) {
+ return eval(val->get(), env);
+ }
+ }
+ return eval(expr->Get("default", list_t{}), env);
+}
+
+auto SeqCaseExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto const& cases = expr->At("case");
+ if (cases) {
+ if (not cases->get()->IsList()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "case in case* has to be a list of pairs, but found {}",
+ cases->get()->ToString())};
+ }
+ auto const& e = expr->At("expr");
+ if (not e) {
+ throw Evaluator::EvaluationError{"missing expr in case"};
+ }
+ auto const& cmp = eval(e->get(), env);
+ for (const auto& pair : cases->get()->List()) {
+ if (not pair->IsList() or pair->List().size() != 2) {
+ throw Evaluator::EvaluationError{
+ fmt::format("case in case* has to be a list of pairs, "
+ "but found entry {}",
+ pair->ToString())};
+ }
+ if (cmp == eval(pair->List()[0], env)) {
+ return eval(pair->List()[1], env);
+ }
+ }
+ }
+ return eval(expr->Get("default", list_t{}), env);
+}
+
+auto EqualExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ return ExpressionPtr{EvalArgument(expr, "$1", eval, env) ==
+ EvalArgument(expr, "$2", eval, env)};
+}
+
+auto AddExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ return eval(expr["$1"], env) + eval(expr["$2"], env);
+}
+
+auto ChangeEndingExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto name = eval(expr->Get("$1", ""s), env);
+ auto ending = eval(expr->Get("ending", ""s), env);
+ return ChangeEndingTo(name, ending);
+}
+
+auto JoinExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto list = eval(expr->Get("$1", list_t{}), env);
+ auto separator = eval(expr->Get("separator", ""s), env);
+ return Join(list, separator->String());
+}
+
+auto JoinCmdExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto const& list = eval(expr->Get("$1", list_t{}), env);
+ return Join</*kDoQuote=*/true>(list, " ");
+}
+
+auto JsonEncodeExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto const& value = eval(expr->Get("$1", list_t{}), env);
+ return ExpressionPtr{
+ value->ToJson(Expression::JsonMode::NullForNonJson).dump()};
+}
+
+auto EscapeCharsExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto string = eval(expr->Get("$1", ""s), env);
+ auto chars = eval(expr->Get("chars", ""s), env);
+ auto escape_prefix = eval(expr->Get("escape_prefix", "\\"s), env);
+ std::stringstream ss{};
+ std::for_each(
+ string->String().begin(), string->String().end(), [&](auto const& c) {
+ auto do_escape = chars->String().find(c) != std::string::npos;
+ ss << (do_escape ? escape_prefix->String() : "") << c;
+ });
+ return ExpressionPtr{ss.str()};
+}
+
+auto LookupExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto k = eval(expr["key"], env);
+ auto d = eval(expr["map"], env);
+ if (not k->IsString()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "Key expected to be string but found {}.", k->ToString())};
+ }
+ if (not d->IsMap()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "Map expected to be mapping but found {}.", d->ToString())};
+ }
+ auto lookup = Expression::kNone;
+ if (d->Map().contains(k->String())) {
+ lookup = d->Map().at(k->String());
+ }
+ if (lookup->IsNone()) {
+ lookup = eval(expr->Get("default", Expression::none_t()), env);
+ }
+ return lookup;
+}
+
+auto EmptyMapExpr(SubExprEvaluator&& /*eval*/,
+ ExpressionPtr const& /*expr*/,
+ Configuration const &
+ /*env*/) -> ExpressionPtr {
+ return Expression::kEmptyMap;
+}
+
+auto SingletonMapExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto key = EvalArgument(expr, "key", eval, env);
+ auto value = EvalArgument(expr, "value", eval, env);
+ return ExpressionPtr{Expression::map_t{key->String(), value}};
+}
+
+auto ToSubdirExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto d = eval(expr["$1"], env);
+ auto s = eval(expr->Get("subdir", "."s), env);
+ auto flat = ValueIsTrue(eval(expr->Get("flat", false), env));
+ std::filesystem::path subdir{s->String()};
+ auto result = Expression::map_t::underlying_map_t{};
+ if (flat) {
+ for (auto const& el : d->Map()) {
+ std::filesystem::path k{el.first};
+ auto new_path = subdir / k.filename();
+ if (result.contains(new_path) && !(result[new_path] == el.second)) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "Flat staging of {} to subdir {} conflicts on path {}",
+ d->ToString(),
+ subdir.string(),
+ new_path.string())};
+ }
+ result[new_path] = el.second;
+ }
+ }
+ else {
+ for (auto const& el : d->Map()) {
+ result[(subdir / el.first).string()] = el.second;
+ }
+ }
+ return ExpressionPtr{Expression::map_t{result}};
+}
+
+auto ForeachExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto range_list = eval(expr->Get("range", list_t{}), env);
+ if (range_list->List().empty()) {
+ return Expression::kEmptyList;
+ }
+ auto const& var = expr->Get("var", "_"s);
+ auto const& body = expr->Get("body", list_t{});
+ auto result = Expression::list_t{};
+ result.reserve(range_list->List().size());
+ std::transform(range_list->List().begin(),
+ range_list->List().end(),
+ std::back_inserter(result),
+ [&](auto const& x) {
+ return eval(body, env.Update(var->String(), x));
+ });
+ return ExpressionPtr{result};
+}
+
+auto ForeachMapExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto range_map = eval(expr->Get("range", Expression::kEmptyMapExpr), env);
+ if (range_map->Map().empty()) {
+ return Expression::kEmptyList;
+ }
+ auto const& var = expr->Get("var_key", "_"s);
+ auto const& var_val = expr->Get("var_val", "$_"s);
+ auto const& body = expr->Get("body", list_t{});
+ auto result = Expression::list_t{};
+ result.reserve(range_map->Map().size());
+ std::transform(range_map->Map().begin(),
+ range_map->Map().end(),
+ std::back_inserter(result),
+ [&](auto const& it) {
+ return eval(body,
+ env.Update(var->String(), it.first)
+ .Update(var_val->String(), it.second));
+ });
+ return ExpressionPtr{result};
+}
+
+auto FoldLeftExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto const& var = expr->Get("var", "_"s);
+ auto const& accum_var = expr->Get("accum_var", "$1"s);
+ auto range_list = eval(expr["range"], env);
+ auto val = eval(expr->Get("start", list_t{}), env);
+ auto const& body = expr->Get("body", list_t{});
+ for (auto const& x : range_list->List()) {
+ val = eval(
+ body, env.Update({{var->String(), x}, {accum_var->String(), val}}));
+ }
+ return val;
+}
+
+auto LetExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto const& bindings = expr->At("bindings");
+ auto new_env = env;
+ if (bindings) {
+ if (not bindings->get()->IsList()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "bindings in let* has to be a list of pairs, but found {}",
+ bindings->get()->ToString())};
+ }
+ int pos = -1;
+ for (const auto& binding : bindings->get()->List()) {
+ ++pos;
+ if (not binding->IsList() or binding->List().size() != 2) {
+ throw Evaluator::EvaluationError{
+ fmt::format("bindings in let* has to be a list of pairs, "
+ "but found entry {}",
+ binding->ToString())};
+ }
+ auto const& x_exp = binding[0];
+ if (not x_exp->IsString()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("variable names in let* have to be strings, "
+ "but found binding entry {}",
+ binding->ToString())};
+ }
+ ExpressionPtr val;
+ try {
+ val = eval(binding[1], new_env);
+ } catch (Evaluator::EvaluationError const& ex) {
+ throw Evaluator::EvaluationError::WhileEval(
+ fmt::format("Evaluating entry {} in bindings, binding {}:",
+ pos,
+ x_exp->ToString()),
+ ex);
+ } catch (std::exception const& ex) {
+ throw Evaluator::EvaluationError::WhileEvaluating(
+ fmt::format("Evaluating entry {} in bindings, binding {}:",
+ pos,
+ x_exp->ToString()),
+ ex);
+ }
+ new_env = new_env.Update(x_exp->String(), val);
+ }
+ }
+ auto const& body = expr->Get("body", map_t{});
+ try {
+ return eval(body, new_env);
+ } catch (Evaluator::EvaluationError const& ex) {
+ throw Evaluator::EvaluationError::WhileEval("Evaluating the body:", ex);
+ } catch (std::exception const& ex) {
+ throw Evaluator::EvaluationError::WhileEvaluating(
+ "Evaluating the body:", ex);
+ }
+}
+
+auto ConcatTargetNameExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto p1 = eval(expr->Get("$1", ""s), env);
+ auto p2 = eval(expr->Get("$2", ""s), env);
+ return ConcatTargetName(p1, Join(p2, ""));
+}
+
+auto ContextExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ try {
+ return eval(expr->Get("$1", Expression::kNone), env);
+ } catch (std::exception const& ex) {
+ auto msg_expr = expr->Get("msg", map_t{});
+ std::string context{};
+ try {
+ auto msg_val = eval(msg_expr, env);
+ context = msg_val->ToString();
+ } catch (std::exception const&) {
+ context = "[non evaluating term] " + msg_expr->ToString();
+ }
+ std::stringstream ss{};
+ ss << "In Context " << context << std::endl;
+ ss << ex.what();
+ throw Evaluator::EvaluationError(ss.str(), true, true);
+ }
+}
+
+auto DisjointUnionExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto argument = EvalArgument(expr, "$1", eval, env);
+ try {
+ return Union</*kDisjoint=*/true>(argument);
+ } catch (std::exception const& ex) {
+ auto msg_expr = expr->Map().Find("msg");
+ if (not msg_expr) {
+ throw Evaluator::EvaluationError::WhileEvaluating(
+ fmt::format("Having evaluted the argument to {}:",
+ argument->ToString()),
+ ex);
+ }
+ std::string msg;
+ try {
+ auto msg_val = eval(msg_expr->get(), env);
+ msg = msg_val->ToString();
+ } catch (std::exception const&) {
+ msg = "[non evaluating term] " + msg_expr->get()->ToString();
+ }
+ std::stringstream ss{};
+ ss << msg << std::endl;
+ ss << "Reason: " << ex.what() << std::endl;
+ ss << "The argument of the union was " << argument->ToString();
+ throw Evaluator::EvaluationError(ss.str(), false, true);
+ }
+}
+
+auto FailExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto msg = eval(expr->Get("msg", Expression::kNone), env);
+ throw Evaluator::EvaluationError(
+ msg->ToString(), false, /* user error*/ true);
+}
+
+auto AssertNonEmptyExpr(SubExprEvaluator&& eval,
+ ExpressionPtr const& expr,
+ Configuration const& env) -> ExpressionPtr {
+ auto val = eval(expr["$1"], env);
+ if ((val->IsString() and (not val->String().empty())) or
+ (val->IsList() and (not val->List().empty())) or
+ (val->IsMap() and (not val->Map().empty()))) {
+ return val;
+ }
+ auto msg_expr = expr->Get("msg", Expression::kNone);
+ std::string msg;
+ try {
+ auto msg_val = eval(msg_expr, env);
+ msg = msg_val->ToString();
+ } catch (std::exception const&) {
+ msg = "[non evaluating term] " + msg_expr->ToString();
+ }
+ std::stringstream ss{};
+ ss << msg << std::endl;
+ ss << "Expected non-empty value but found: " << val->ToString();
+ throw Evaluator::EvaluationError(ss.str(), false, true);
+}
+
+auto built_in_functions =
+ FunctionMap::MakePtr({{"var", VarExpr},
+ {"if", IfExpr},
+ {"cond", CondExpr},
+ {"case", CaseExpr},
+ {"case*", SeqCaseExpr},
+ {"fail", FailExpr},
+ {"assert_non_empty", AssertNonEmptyExpr},
+ {"context", ContextExpr},
+ {"==", EqualExpr},
+ {"and", AndExpr},
+ {"or", OrExpr},
+ {"+", AddExpr},
+ {"++", UnaryExpr(Flatten)},
+ {"nub_right", UnaryExpr(NubRight)},
+ {"change_ending", ChangeEndingExpr},
+ {"basename", UnaryExpr(BaseName)},
+ {"join", JoinExpr},
+ {"join_cmd", JoinCmdExpr},
+ {"json_encode", JsonEncodeExpr},
+ {"escape_chars", EscapeCharsExpr},
+ {"keys", UnaryExpr(Keys)},
+ {"values", UnaryExpr(Values)},
+ {"lookup", LookupExpr},
+ {"empty_map", EmptyMapExpr},
+ {"singleton_map", SingletonMapExpr},
+ {"disjoint_map_union", DisjointUnionExpr},
+ {"map_union", UnaryExpr([](auto const& exp) {
+ return Union</*kDisjoint=*/false>(exp);
+ })},
+ {"to_subdir", ToSubdirExpr},
+ {"foreach", ForeachExpr},
+ {"foreach_map", ForeachMapExpr},
+ {"foldl", FoldLeftExpr},
+ {"let*", LetExpr},
+ {"concat_target_name", ConcatTargetNameExpr}});
+
+} // namespace
+
+auto Evaluator::EvaluationError::WhileEvaluating(ExpressionPtr const& expr,
+ Configuration const& env,
+ std::exception const& ex)
+ -> Evaluator::EvaluationError {
+ std::stringstream ss{};
+ ss << "* ";
+ if (expr->IsMap() and expr->Map().contains("type") and
+ expr["type"]->IsString()) {
+ ss << expr["type"]->ToString() << "-expression ";
+ }
+ ss << expr->ToString() << std::endl;
+ ss << " environment " << std::endl;
+ ss << env.Enumerate(" - ", kLineWidth) << std::endl;
+ ss << ex.what();
+ return EvaluationError{ss.str(), true /* while_eval */};
+}
+
+auto Evaluator::EvaluationError::WhileEval(ExpressionPtr const& expr,
+ Configuration const& env,
+ Evaluator::EvaluationError const& ex)
+ -> Evaluator::EvaluationError {
+ if (ex.UserContext()) {
+ return ex;
+ }
+ return Evaluator::EvaluationError::WhileEvaluating(expr, env, ex);
+}
+
+auto Evaluator::EvaluationError::WhileEvaluating(const std::string& where,
+ std::exception const& ex)
+ -> Evaluator::EvaluationError {
+ std::stringstream ss{};
+ ss << where << std::endl;
+ ss << ex.what();
+ return EvaluationError{ss.str(), true /* while_eval */};
+}
+
+auto Evaluator::EvaluationError::WhileEval(const std::string& where,
+ Evaluator::EvaluationError const& ex)
+ -> Evaluator::EvaluationError {
+ if (ex.UserContext()) {
+ return ex;
+ }
+ return Evaluator::EvaluationError::WhileEvaluating(where, ex);
+}
+
+auto Evaluator::EvaluateExpression(
+ ExpressionPtr const& expr,
+ Configuration const& env,
+ FunctionMapPtr const& provider_functions,
+ std::function<void(std::string const&)> const& logger,
+ std::function<void(void)> const& note_user_context) noexcept
+ -> ExpressionPtr {
+ std::stringstream ss{};
+ try {
+ return Evaluate(
+ expr,
+ env,
+ FunctionMap::MakePtr(built_in_functions, provider_functions));
+ } catch (EvaluationError const& ex) {
+ if (ex.UserContext()) {
+ try {
+ note_user_context();
+ } catch (...) {
+ // should not throw
+ }
+ }
+ else {
+ if (ex.WhileEvaluation()) {
+ ss << "Expression evaluation traceback (most recent call last):"
+ << std::endl;
+ }
+ }
+ ss << ex.what();
+ } catch (std::exception const& ex) {
+ ss << ex.what();
+ }
+ try {
+ logger(ss.str());
+ } catch (...) {
+ // should not throw
+ }
+ return ExpressionPtr{nullptr};
+}
+
+auto Evaluator::Evaluate(ExpressionPtr const& expr,
+ Configuration const& env,
+ FunctionMapPtr const& functions) -> ExpressionPtr {
+ try {
+ if (expr->IsList()) {
+ if (expr->List().empty()) {
+ return expr;
+ }
+ auto list = Expression::list_t{};
+ std::transform(
+ expr->List().cbegin(),
+ expr->List().cend(),
+ std::back_inserter(list),
+ [&](auto const& e) { return Evaluate(e, env, functions); });
+ return ExpressionPtr{list};
+ }
+ if (not expr->IsMap()) {
+ return expr;
+ }
+ if (not expr->Map().contains("type")) {
+ throw EvaluationError{fmt::format(
+ "Object without keyword 'type': {}", expr->ToString())};
+ }
+ auto const& type = expr["type"]->String();
+ auto func = functions->Find(type);
+ if (func) {
+ return func->get()(
+ [&functions](auto const& subexpr, auto const& subenv) {
+ return Evaluator::Evaluate(subexpr, subenv, functions);
+ },
+ expr,
+ env);
+ }
+ throw EvaluationError{
+ fmt::format("Unknown syntactical construct {}", type)};
+ } catch (EvaluationError const& ex) {
+ throw EvaluationError::WhileEval(expr, env, ex);
+ } catch (std::exception const& ex) {
+ throw EvaluationError::WhileEvaluating(expr, env, ex);
+ }
+}
diff --git a/src/buildtool/build_engine/expression/evaluator.hpp b/src/buildtool/build_engine/expression/evaluator.hpp
new file mode 100644
index 00000000..b4cd5979
--- /dev/null
+++ b/src/buildtool/build_engine/expression/evaluator.hpp
@@ -0,0 +1,76 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_EVALUATOR_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_EVALUATOR_HPP
+
+#include <exception>
+#include <string>
+
+#include "src/buildtool/build_engine/expression/expression.hpp"
+#include "src/buildtool/build_engine/expression/function_map.hpp"
+
+class Configuration;
+
+class Evaluator {
+ public:
+ class EvaluationError : public std::exception {
+ public:
+ explicit EvaluationError(std::string const& msg,
+ bool while_eval = false,
+ bool user_context = false) noexcept
+ : msg_{(while_eval ? ""
+ : (user_context ? "UserError: "
+ : "EvaluationError: ")) +
+ msg},
+ while_eval_{while_eval},
+ user_context_{user_context} {}
+ [[nodiscard]] auto what() const noexcept -> char const* final {
+ return msg_.c_str();
+ }
+
+ [[nodiscard]] auto WhileEvaluation() const -> bool {
+ return while_eval_;
+ }
+
+ [[nodiscard]] auto UserContext() const -> bool { return user_context_; }
+
+ [[nodiscard]] static auto WhileEvaluating(ExpressionPtr const& expr,
+ Configuration const& env,
+ std::exception const& ex)
+ -> EvaluationError;
+
+ [[nodiscard]] static auto WhileEval(ExpressionPtr const& expr,
+ Configuration const& env,
+ EvaluationError const& ex)
+ -> EvaluationError;
+
+ [[nodiscard]] static auto WhileEvaluating(const std::string& where,
+ std::exception const& ex)
+ -> Evaluator::EvaluationError;
+
+ [[nodiscard]] static auto WhileEval(const std::string& where,
+ EvaluationError const& ex)
+ -> Evaluator::EvaluationError;
+
+ private:
+ std::string msg_;
+ bool while_eval_;
+ bool user_context_;
+ };
+
+ // Exception-free evaluation of expression
+ [[nodiscard]] static auto EvaluateExpression(
+ ExpressionPtr const& expr,
+ Configuration const& env,
+ FunctionMapPtr const& provider_functions,
+ std::function<void(std::string const&)> const& logger,
+ std::function<void(void)> const& note_user_context = []() {}) noexcept
+ -> ExpressionPtr;
+
+ private:
+ constexpr static std::size_t kLineWidth = 80;
+ [[nodiscard]] static auto Evaluate(ExpressionPtr const& expr,
+ Configuration const& env,
+ FunctionMapPtr const& functions)
+ -> ExpressionPtr;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_EVALUATOR_HPP
diff --git a/src/buildtool/build_engine/expression/expression.cpp b/src/buildtool/build_engine/expression/expression.cpp
new file mode 100644
index 00000000..5a161468
--- /dev/null
+++ b/src/buildtool/build_engine/expression/expression.cpp
@@ -0,0 +1,249 @@
+#include "src/buildtool/build_engine/expression/expression.hpp"
+
+#include <exception>
+#include <optional>
+#include <sstream>
+#include <string>
+#include <type_traits>
+
+#include "fmt/core.h"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/build_engine/expression/evaluator.hpp"
+#include "src/buildtool/logging/logger.hpp"
+#include "src/utils/cpp/json.hpp"
+
+auto Expression::operator[](
+ std::string const& key) const& -> ExpressionPtr const& {
+ auto value = Map().Find(key);
+ if (value) {
+ return value->get();
+ }
+ throw ExpressionTypeError{
+ fmt::format("Map does not contain key '{}'.", key)};
+}
+
+auto Expression::operator[](std::string const& key) && -> ExpressionPtr {
+ auto value = std::move(*this).Map().Find(key);
+ if (value) {
+ return std::move(*value);
+ }
+ throw ExpressionTypeError{
+ fmt::format("Map does not contain key '{}'.", key)};
+}
+
+auto Expression::operator[](
+ ExpressionPtr const& key) const& -> ExpressionPtr const& {
+ return (*this)[key->String()];
+}
+
+auto Expression::operator[](ExpressionPtr const& key) && -> ExpressionPtr {
+ return std::move(*this)[key->String()];
+}
+
+auto Expression::operator[](size_t pos) const& -> ExpressionPtr const& {
+ if (pos < List().size()) {
+ return List().at(pos);
+ }
+ throw ExpressionTypeError{
+ fmt::format("List pos '{}' is out of bounds.", pos)};
+}
+
+auto Expression::operator[](size_t pos) && -> ExpressionPtr {
+ auto&& list = std::move(*this).List();
+ if (pos < list.size()) {
+ return list.at(pos);
+ }
+ throw ExpressionTypeError{
+ fmt::format("List pos '{}' is out of bounds.", pos)};
+}
+
+auto Expression::ToJson(Expression::JsonMode mode) const -> nlohmann::json {
+ if (IsBool()) {
+ return Bool();
+ }
+ if (IsNumber()) {
+ return Number();
+ }
+ if (IsString()) {
+ return String();
+ }
+ if (IsArtifact() and mode != JsonMode::NullForNonJson) {
+ return Artifact().ToJson();
+ }
+ if (IsResult() and mode != JsonMode::NullForNonJson) {
+ auto const& result = Result();
+ return Expression{map_t{{{"artifact_stage", result.artifact_stage},
+ {"runfiles", result.runfiles},
+ {"provides", result.provides}}}}
+ .ToJson(JsonMode::SerializeAllButNodes);
+ }
+ if (IsNode() and mode != JsonMode::NullForNonJson) {
+ switch (mode) {
+ case JsonMode::SerializeAll:
+ return Node().ToJson();
+ case JsonMode::SerializeAllButNodes:
+ return {{"type", "NODE"}, {"id", ToIdentifier()}};
+ default:
+ break;
+ }
+ }
+ if (IsList()) {
+ auto json = nlohmann::json::array();
+ auto const& list = List();
+ std::transform(list.begin(),
+ list.end(),
+ std::back_inserter(json),
+ [mode](auto const& e) { return e->ToJson(mode); });
+ return json;
+ }
+ if (IsMap()) {
+ auto json = nlohmann::json::object();
+ auto const& map = Value<map_t>()->get();
+ std::for_each(map.begin(), map.end(), [&](auto const& p) {
+ json.emplace(p.first, p.second->ToJson(mode));
+ });
+ return json;
+ }
+ if (IsName() and mode != JsonMode::NullForNonJson) {
+ return Name().ToJson();
+ }
+ return nlohmann::json{};
+}
+
+auto Expression::IsCacheable() const -> bool {
+ // Must be updated whenever we add a new non-cacheable value
+ if (IsName()) {
+ return false;
+ }
+ if (IsResult()) {
+ return Result().is_cacheable;
+ }
+ if (IsNode()) {
+ return Node().IsCacheable();
+ }
+ if (IsList()) {
+ for (auto const& entry : List()) {
+ if (not entry->IsCacheable()) {
+ return false;
+ }
+ }
+ }
+ if (IsMap()) {
+ for (auto const& [key, entry] : Map()) {
+ if (not entry->IsCacheable()) {
+ return false;
+ }
+ }
+ }
+ return true;
+}
+
+auto Expression::ToString() const -> std::string {
+ return ToJson().dump();
+}
+
+auto Expression::ToHash() const noexcept -> std::string {
+ if (hash_.load() == nullptr) {
+ if (not hash_loading_.exchange(true)) {
+ hash_ = std::make_shared<std::string>(ComputeHash());
+ hash_.notify_all();
+ }
+ else {
+ hash_.wait(nullptr);
+ }
+ }
+ return *hash_.load();
+}
+
+auto Expression::FromJson(nlohmann::json const& json) noexcept
+ -> ExpressionPtr {
+ if (json.is_null()) {
+ return ExpressionPtr{none_t{}};
+ }
+ try { // try-catch because json.get<>() could throw, although checked
+ if (json.is_boolean()) {
+ return ExpressionPtr{json.get<bool>()};
+ }
+ if (json.is_number()) {
+ return ExpressionPtr{json.get<number_t>()};
+ }
+ if (json.is_string()) {
+ return ExpressionPtr{std::string{json.get<std::string>()}};
+ }
+ if (json.is_array()) {
+ auto l = Expression::list_t{};
+ l.reserve(json.size());
+ std::transform(json.begin(),
+ json.end(),
+ std::back_inserter(l),
+ [](auto const& j) { return FromJson(j); });
+ return ExpressionPtr{l};
+ }
+ if (json.is_object()) {
+ auto m = Expression::map_t::underlying_map_t{};
+ for (auto& el : json.items()) {
+ m.emplace(el.key(), FromJson(el.value()));
+ }
+ return ExpressionPtr{Expression::map_t{m}};
+ }
+ } catch (...) {
+ gsl_EnsuresAudit(false); // ensure that the try-block never throws
+ }
+ return ExpressionPtr{nullptr};
+}
+
+template <size_t kIndex>
+auto Expression::TypeStringForIndex() const noexcept -> std::string {
+ using var_t = decltype(data_);
+ if (kIndex == data_.index()) {
+ return TypeToString<std::variant_alternative_t<kIndex, var_t>>();
+ }
+ constexpr auto size = std::variant_size_v<var_t>;
+ if constexpr (kIndex < size - 1) {
+ return TypeStringForIndex<kIndex + 1>();
+ }
+ return TypeToString<std::variant_alternative_t<size - 1, var_t>>();
+}
+
+auto Expression::TypeString() const noexcept -> std::string {
+ return TypeStringForIndex();
+}
+
+auto Expression::ComputeHash() const noexcept -> std::string {
+ auto hash = std::string{};
+ if (IsNone() or IsBool() or IsNumber() or IsString() or IsArtifact() or
+ IsResult() or IsNode() or IsName()) {
+ // just hash the JSON representation, but prepend "@" for artifact,
+ // "=" for result, "#" for node, and "$" for name.
+ std::string prefix{
+ IsArtifact()
+ ? "@"
+ : IsResult() ? "=" : IsNode() ? "#" : IsName() ? "$" : ""};
+ hash = hash_gen_.Run(prefix + ToString()).Bytes();
+ }
+ else {
+ auto hasher = hash_gen_.IncrementalHasher();
+ if (IsList()) {
+ auto list = Value<Expression::list_t>();
+ hasher.Update("[");
+ for (auto const& el : list->get()) {
+ hasher.Update(el->ToHash());
+ }
+ }
+ else if (IsMap()) {
+ auto map = Value<Expression::map_t>();
+ hasher.Update("{");
+ for (auto const& el : map->get()) {
+ hasher.Update(hash_gen_.Run(el.first).Bytes());
+ hasher.Update(el.second->ToHash());
+ }
+ }
+ auto digest = std::move(hasher).Finalize();
+ if (not digest) {
+ Logger::Log(LogLevel::Error, "Failed to finalize hash.");
+ std::terminate();
+ }
+ hash = digest->Bytes();
+ }
+ return hash;
+}
diff --git a/src/buildtool/build_engine/expression/expression.hpp b/src/buildtool/build_engine/expression/expression.hpp
new file mode 100644
index 00000000..ffcd01c8
--- /dev/null
+++ b/src/buildtool/build_engine/expression/expression.hpp
@@ -0,0 +1,380 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_EXPRESSION_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_EXPRESSION_HPP
+
+#include <exception>
+#include <functional>
+#include <memory>
+#include <optional>
+#include <string>
+#include <type_traits>
+#include <variant>
+#include <vector>
+
+#include "fmt/core.h"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/build_engine/base_maps/entity_name_data.hpp"
+#include "src/buildtool/build_engine/expression/expression_ptr.hpp"
+#include "src/buildtool/build_engine/expression/function_map.hpp"
+#include "src/buildtool/build_engine/expression/linked_map.hpp"
+#include "src/buildtool/build_engine/expression/target_node.hpp"
+#include "src/buildtool/build_engine/expression/target_result.hpp"
+#include "src/buildtool/common/artifact_description.hpp"
+#include "src/buildtool/crypto/hash_generator.hpp"
+#include "src/utils/cpp/atomic.hpp"
+#include "src/utils/cpp/hex_string.hpp"
+#include "src/utils/cpp/json.hpp"
+
+class Expression {
+ friend auto operator+(Expression const& /*lhs*/, Expression const & /*rhs*/)
+ -> Expression;
+
+ public:
+ using none_t = std::monostate;
+ using number_t = double;
+ using artifact_t = ArtifactDescription;
+ using result_t = TargetResult;
+ using node_t = TargetNode;
+ using list_t = std::vector<ExpressionPtr>;
+ using map_t = LinkedMap<std::string, ExpressionPtr, ExpressionPtr>;
+ using name_t = BuildMaps::Base::EntityName;
+
+ template <class T, size_t kIndex = 0>
+ static consteval auto IsValidType() -> bool {
+ if constexpr (kIndex < std::variant_size_v<decltype(data_)>) {
+ return std::is_same_v<
+ T,
+ std::variant_alternative_t<kIndex, decltype(data_)>> or
+ IsValidType<T, kIndex + 1>();
+ }
+ return false;
+ }
+
+ class ExpressionTypeError : public std::exception {
+ public:
+ explicit ExpressionTypeError(std::string const& msg) noexcept
+ : msg_{"ExpressionTypeError: " + msg} {}
+ [[nodiscard]] auto what() const noexcept -> char const* final {
+ return msg_.c_str();
+ }
+
+ private:
+ std::string msg_;
+ };
+
+ Expression() noexcept = default;
+ ~Expression() noexcept = default;
+ Expression(Expression const& other) noexcept
+ : data_{other.data_}, hash_{other.hash_.load()} {}
+ Expression(Expression&& other) noexcept
+ : data_{std::move(other.data_)}, hash_{other.hash_.load()} {}
+ auto operator=(Expression const& other) noexcept -> Expression& {
+ if (this != &other) {
+ data_ = other.data_;
+ }
+ hash_ = other.hash_.load();
+ return *this;
+ }
+ auto operator=(Expression&& other) noexcept -> Expression& {
+ data_ = std::move(other.data_);
+ hash_ = other.hash_.load();
+ return *this;
+ }
+
+ template <class T>
+ requires(IsValidType<std::remove_cvref_t<T>>())
+ // NOLINTNEXTLINE(bugprone-forwarding-reference-overload)
+ explicit Expression(T&& data) noexcept
+ : data_{std::forward<T>(data)} {}
+
+ [[nodiscard]] auto IsNone() const noexcept -> bool { return IsA<none_t>(); }
+ [[nodiscard]] auto IsBool() const noexcept -> bool { return IsA<bool>(); }
+ [[nodiscard]] auto IsNumber() const noexcept -> bool {
+ return IsA<number_t>();
+ }
+ [[nodiscard]] auto IsString() const noexcept -> bool {
+ return IsA<std::string>();
+ }
+ [[nodiscard]] auto IsName() const noexcept -> bool { return IsA<name_t>(); }
+ [[nodiscard]] auto IsArtifact() const noexcept -> bool {
+ return IsA<artifact_t>();
+ }
+ [[nodiscard]] auto IsResult() const noexcept -> bool {
+ return IsA<result_t>();
+ }
+ [[nodiscard]] auto IsNode() const noexcept -> bool { return IsA<node_t>(); }
+ [[nodiscard]] auto IsList() const noexcept -> bool { return IsA<list_t>(); }
+ [[nodiscard]] auto IsMap() const noexcept -> bool { return IsA<map_t>(); }
+
+ [[nodiscard]] auto Bool() const -> bool { return Cast<bool>(); }
+ [[nodiscard]] auto Number() const -> number_t { return Cast<number_t>(); }
+ [[nodiscard]] auto Name() const -> name_t { return Cast<name_t>(); }
+ [[nodiscard]] auto String() const& -> std::string const& {
+ return Cast<std::string>();
+ }
+ [[nodiscard]] auto String() && -> std::string {
+ return std::move(*this).Cast<std::string>();
+ }
+ [[nodiscard]] auto Artifact() const& -> artifact_t const& {
+ return Cast<artifact_t>();
+ }
+ [[nodiscard]] auto Artifact() && -> artifact_t {
+ return std::move(*this).Cast<artifact_t>();
+ }
+ [[nodiscard]] auto Result() const& -> result_t const& {
+ return Cast<result_t>();
+ }
+ [[nodiscard]] auto Result() && -> result_t {
+ return std::move(*this).Cast<result_t>();
+ }
+ [[nodiscard]] auto Node() const& -> node_t const& { return Cast<node_t>(); }
+ [[nodiscard]] auto Node() && -> node_t {
+ return std::move(*this).Cast<node_t>();
+ }
+ [[nodiscard]] auto List() const& -> list_t const& { return Cast<list_t>(); }
+ [[nodiscard]] auto List() && -> list_t {
+ return std::move(*this).Cast<list_t>();
+ }
+ [[nodiscard]] auto Map() const& -> map_t const& { return Cast<map_t>(); }
+ [[nodiscard]] auto Map() && -> map_t {
+ return std::move(*this).Cast<map_t>();
+ }
+
+ [[nodiscard]] auto At(std::string const& key)
+ const& -> std::optional<std::reference_wrapper<ExpressionPtr const>> {
+ auto value = Map().Find(key);
+ if (value) {
+ return value;
+ }
+ return std::nullopt;
+ }
+
+ [[nodiscard]] auto At(
+ std::string const& key) && -> std::optional<ExpressionPtr> {
+ auto value = std::move(*this).Map().Find(key);
+ if (value) {
+ return std::move(*value);
+ }
+ return std::nullopt;
+ }
+
+ template <class T>
+ requires(IsValidType<std::remove_cvref_t<T>>() or
+ std::is_same_v<std::remove_cvref_t<T>, ExpressionPtr>)
+ [[nodiscard]] auto Get(std::string const& key, T&& default_value) const
+ -> ExpressionPtr {
+ auto value = At(key);
+ if (value) {
+ return value->get();
+ }
+ if constexpr (std::is_same_v<std::remove_cvref_t<T>, ExpressionPtr>) {
+ return std::forward<T>(default_value);
+ }
+ else {
+ return ExpressionPtr{std::forward<T>(default_value)};
+ }
+ }
+
+ template <class T>
+ requires(IsValidType<T>()) [[nodiscard]] auto Value() const& noexcept
+ -> std::optional<std::reference_wrapper<T const>> {
+ if (GetIndexOf<T>() == data_.index()) {
+ return std::make_optional(std::ref(std::get<T>(data_)));
+ }
+ return std::nullopt;
+ }
+
+ template <class T>
+ requires(IsValidType<T>()) [[nodiscard]] auto Value() && noexcept
+ -> std::optional<T> {
+ if (GetIndexOf<T>() == data_.index()) {
+ return std::make_optional(std::move(std::get<T>(data_)));
+ }
+ return std::nullopt;
+ }
+
+ template <class T>
+ [[nodiscard]] auto operator==(T const& other) const noexcept -> bool {
+ if constexpr (std::is_same_v<T, Expression>) {
+ return (&data_ == &other.data_) or (ToHash() == other.ToHash());
+ }
+ else {
+ return IsValidType<T>() and (GetIndexOf<T>() == data_.index()) and
+ ((static_cast<void const*>(&data_) ==
+ static_cast<void const*>(&other)) or
+ (std::get<T>(data_) == other));
+ }
+ }
+
+ template <class T>
+ [[nodiscard]] auto operator!=(T const& other) const noexcept -> bool {
+ return !(*this == other);
+ }
+ [[nodiscard]] auto operator[](
+ std::string const& key) const& -> ExpressionPtr const&;
+ [[nodiscard]] auto operator[](std::string const& key) && -> ExpressionPtr;
+ [[nodiscard]] auto operator[](
+ ExpressionPtr const& key) const& -> ExpressionPtr const&;
+ [[nodiscard]] auto operator[](ExpressionPtr const& key) && -> ExpressionPtr;
+ [[nodiscard]] auto operator[](size_t pos) const& -> ExpressionPtr const&;
+ [[nodiscard]] auto operator[](size_t pos) && -> ExpressionPtr;
+
+ enum class JsonMode { SerializeAll, SerializeAllButNodes, NullForNonJson };
+
+ [[nodiscard]] auto ToJson(JsonMode mode = JsonMode::SerializeAll) const
+ -> nlohmann::json;
+ [[nodiscard]] auto IsCacheable() const -> bool;
+ [[nodiscard]] auto ToString() const -> std::string;
+ [[nodiscard]] auto ToHash() const noexcept -> std::string;
+ [[nodiscard]] auto ToIdentifier() const noexcept -> std::string {
+ return ToHexString(ToHash());
+ }
+
+ [[nodiscard]] static auto FromJson(nlohmann::json const& json) noexcept
+ -> ExpressionPtr;
+
+ inline static ExpressionPtr const kNone = Expression::FromJson("null"_json);
+ inline static ExpressionPtr const kEmptyMap =
+ Expression::FromJson("{}"_json);
+ inline static ExpressionPtr const kEmptyList =
+ Expression::FromJson("[]"_json);
+ inline static ExpressionPtr const kEmptyMapExpr =
+ Expression::FromJson(R"({"type": "empty_map"})"_json);
+
+ private:
+ inline static HashGenerator const hash_gen_{
+ HashGenerator::HashType::SHA256};
+
+ std::variant<none_t,
+ bool,
+ number_t,
+ std::string,
+ name_t,
+ artifact_t,
+ result_t,
+ node_t,
+ list_t,
+ map_t>
+ data_{none_t{}};
+
+ mutable atomic_shared_ptr<std::string> hash_{};
+ mutable std::atomic<bool> hash_loading_{};
+
+ template <class T, std::size_t kIndex = 0>
+ requires(IsValidType<T>()) [[nodiscard]] static consteval auto GetIndexOf()
+ -> std::size_t {
+ static_assert(kIndex < std::variant_size_v<decltype(data_)>,
+ "kIndex out of range");
+ if constexpr (std::is_same_v<
+ T,
+ std::variant_alternative_t<kIndex,
+ decltype(data_)>>) {
+ return kIndex;
+ }
+ else {
+ return GetIndexOf<T, kIndex + 1>();
+ }
+ }
+
+ template <class T>
+ [[nodiscard]] auto IsA() const noexcept -> bool {
+ return std::holds_alternative<T>(data_);
+ }
+
+ template <class T>
+ [[nodiscard]] auto Cast() const& -> T const& {
+ if (GetIndexOf<T>() == data_.index()) {
+ return std::get<T>(data_);
+ }
+ // throw descriptive ExpressionTypeError
+ throw ExpressionTypeError{
+ fmt::format("Expression is not of type '{}' but '{}'.",
+ TypeToString<T>(),
+ TypeString())};
+ }
+
+ template <class T>
+ [[nodiscard]] auto Cast() && -> T {
+ if (GetIndexOf<T>() == data_.index()) {
+ return std::move(std::get<T>(data_));
+ }
+ // throw descriptive ExpressionTypeError
+ throw ExpressionTypeError{
+ fmt::format("Expression is not of type '{}' but '{}'.",
+ TypeToString<T>(),
+ TypeString())};
+ }
+
+ template <class T>
+ requires(Expression::IsValidType<T>())
+ [[nodiscard]] static auto TypeToString() noexcept -> std::string {
+ if constexpr (std::is_same_v<T, bool>) {
+ return "bool";
+ }
+ else if constexpr (std::is_same_v<T, Expression::number_t>) {
+ return "number";
+ }
+ else if constexpr (std::is_same_v<T, Expression::name_t>) {
+ return "name";
+ }
+ else if constexpr (std::is_same_v<T, std::string>) {
+ return "string";
+ }
+ else if constexpr (std::is_same_v<T, Expression::artifact_t>) {
+ return "artifact";
+ }
+ else if constexpr (std::is_same_v<T, Expression::result_t>) {
+ return "result";
+ }
+ else if constexpr (std::is_same_v<T, Expression::node_t>) {
+ return "node";
+ }
+ else if constexpr (std::is_same_v<T, Expression::list_t>) {
+ return "list";
+ }
+ else if constexpr (std::is_same_v<T, Expression::map_t>) {
+ return "map";
+ }
+ return "none";
+ }
+
+ template <size_t kIndex = 0>
+ [[nodiscard]] auto TypeStringForIndex() const noexcept -> std::string;
+ [[nodiscard]] auto TypeString() const noexcept -> std::string;
+ [[nodiscard]] auto ComputeHash() const noexcept -> std::string;
+};
+
+[[nodiscard]] inline auto operator+(Expression const& lhs,
+ Expression const& rhs) -> Expression {
+ if (lhs.data_.index() != rhs.data_.index()) {
+ throw Expression::ExpressionTypeError{
+ fmt::format("Cannot add expressions of different type: {} != {}",
+ lhs.TypeString(),
+ rhs.TypeString())};
+ }
+ if (not lhs.IsList()) {
+ throw Expression::ExpressionTypeError{fmt::format(
+ "Cannot add expressions of type '{}'.", lhs.TypeString())};
+ }
+ auto list = Expression::list_t{};
+ auto const& llist = lhs.List();
+ auto const& rlist = rhs.List();
+ list.reserve(llist.size() + rlist.size());
+ list.insert(list.begin(), llist.begin(), llist.end());
+ list.insert(list.end(), rlist.begin(), rlist.end());
+ return Expression{list};
+}
+
+namespace std {
+template <>
+struct hash<Expression> {
+ [[nodiscard]] auto operator()(Expression const& e) const noexcept
+ -> std::size_t {
+ auto hash = std::size_t{};
+ auto bytes = e.ToHash();
+ std::memcpy(&hash, bytes.data(), std::min(sizeof(hash), bytes.size()));
+ return hash;
+ }
+};
+} // namespace std
+
+#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_EXPRESSION_HPP
diff --git a/src/buildtool/build_engine/expression/expression_ptr.cpp b/src/buildtool/build_engine/expression/expression_ptr.cpp
new file mode 100644
index 00000000..97cdb138
--- /dev/null
+++ b/src/buildtool/build_engine/expression/expression_ptr.cpp
@@ -0,0 +1,89 @@
+#include <string>
+
+#include "src/buildtool/build_engine/expression/evaluator.hpp"
+#include "src/buildtool/build_engine/expression/expression.hpp"
+
+ExpressionPtr::ExpressionPtr() noexcept : ptr_{Expression::kNone.ptr_} {}
+
+auto ExpressionPtr::operator*() && -> Expression {
+ return *ptr_;
+}
+
+auto ExpressionPtr::operator[](
+ std::string const& key) const& -> ExpressionPtr const& {
+ return (*ptr_)[key];
+}
+
+auto ExpressionPtr::operator[](std::string const& key) && -> ExpressionPtr {
+ return (*ptr_)[key];
+}
+
+auto ExpressionPtr::operator[](
+ ExpressionPtr const& key) const& -> ExpressionPtr const& {
+ return (*ptr_)[key];
+}
+
+auto ExpressionPtr::operator[](ExpressionPtr const& key) && -> ExpressionPtr {
+ return (*ptr_)[key];
+}
+
+auto ExpressionPtr::operator[](size_t pos) const& -> ExpressionPtr const& {
+ return (*ptr_)[pos];
+}
+
+auto ExpressionPtr::operator[](size_t pos) && -> ExpressionPtr {
+ return (*ptr_)[pos];
+}
+
+auto ExpressionPtr::operator<(ExpressionPtr const& other) const -> bool {
+ return ptr_->ToHash() < other.ptr_->ToHash();
+}
+
+auto ExpressionPtr::operator==(ExpressionPtr const& other) const -> bool {
+ return ptr_ == other.ptr_ or (ptr_ and other.ptr_ and *ptr_ == *other.ptr_);
+}
+
+auto ExpressionPtr::Evaluate(
+ Configuration const& env,
+ FunctionMapPtr const& functions,
+ std::function<void(std::string const&)> const& logger,
+ std::function<void(void)> const& note_user_context) const noexcept
+ -> ExpressionPtr {
+ return Evaluator::EvaluateExpression(
+ *this, env, functions, logger, note_user_context);
+}
+
+auto ExpressionPtr::IsCacheable() const noexcept -> bool {
+ return ptr_ and ptr_->IsCacheable();
+}
+
+auto ExpressionPtr::ToIdentifier() const noexcept -> std::string {
+ return ptr_ ? ptr_->ToIdentifier() : std::string{};
+}
+
+auto ExpressionPtr::ToJson() const noexcept -> nlohmann::json {
+ return ptr_ ? ptr_->ToJson() : nlohmann::json::object();
+}
+
+auto ExpressionPtr::IsNotNull() const noexcept -> bool {
+ // ExpressionPtr is nullptr in error case and none_t default empty case.
+ return static_cast<bool>(ptr_) and not(ptr_->IsNone());
+}
+
+auto ExpressionPtr::LinkedMap() const& -> ExpressionPtr::linked_map_t const& {
+ return ptr_->Map();
+}
+
+auto ExpressionPtr::Make(linked_map_t&& map) -> ExpressionPtr {
+ return ExpressionPtr{std::move(map)};
+}
+
+auto operator+(ExpressionPtr const& lhs, ExpressionPtr const& rhs)
+ -> ExpressionPtr {
+ return ExpressionPtr{*lhs + *rhs};
+}
+
+auto std::hash<ExpressionPtr>::operator()(ExpressionPtr const& p) const noexcept
+ -> std::size_t {
+ return std::hash<Expression>{}(*p);
+}
diff --git a/src/buildtool/build_engine/expression/expression_ptr.hpp b/src/buildtool/build_engine/expression/expression_ptr.hpp
new file mode 100644
index 00000000..8cc26c50
--- /dev/null
+++ b/src/buildtool/build_engine/expression/expression_ptr.hpp
@@ -0,0 +1,95 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_EXPRESSION_PTR_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_EXPRESSION_PTR_HPP
+
+#include <functional>
+#include <memory>
+#include <string>
+#include <type_traits>
+
+#include "nlohmann/json.hpp"
+#include "src/buildtool/build_engine/expression/function_map.hpp"
+#include "src/buildtool/build_engine/expression/linked_map.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+class Configuration;
+class Expression;
+
+class ExpressionPtr {
+ public:
+ // Initialize to nullptr
+ explicit ExpressionPtr(std::nullptr_t /*ptr*/) noexcept : ptr_{nullptr} {}
+
+ // Initialize from Expression's variant type or Expression
+ template <class T>
+ requires(not std::is_same_v<std::remove_cvref_t<T>, ExpressionPtr>)
+ // NOLINTNEXTLINE(bugprone-forwarding-reference-overload)
+ explicit ExpressionPtr(T&& data) noexcept
+ : ptr_{std::make_shared<Expression>(std::forward<T>(data))} {}
+
+ ExpressionPtr() noexcept;
+ ExpressionPtr(ExpressionPtr const&) noexcept = default;
+ ExpressionPtr(ExpressionPtr&&) noexcept = default;
+ ~ExpressionPtr() noexcept = default;
+ auto operator=(ExpressionPtr const&) noexcept -> ExpressionPtr& = default;
+ auto operator=(ExpressionPtr&&) noexcept -> ExpressionPtr& = default;
+
+ explicit operator bool() const { return static_cast<bool>(ptr_); }
+ [[nodiscard]] auto operator*() const& -> Expression const& { return *ptr_; }
+ [[nodiscard]] auto operator*() && -> Expression;
+ [[nodiscard]] auto operator->() const& -> Expression const* {
+ return ptr_.get();
+ }
+ [[nodiscard]] auto operator->() && -> Expression const* = delete;
+ [[nodiscard]] auto operator[](
+ std::string const& key) const& -> ExpressionPtr const&;
+ [[nodiscard]] auto operator[](std::string const& key) && -> ExpressionPtr;
+ [[nodiscard]] auto operator[](
+ ExpressionPtr const& key) const& -> ExpressionPtr const&;
+ [[nodiscard]] auto operator[](ExpressionPtr const& key) && -> ExpressionPtr;
+ [[nodiscard]] auto operator[](size_t pos) const& -> ExpressionPtr const&;
+ [[nodiscard]] auto operator[](size_t pos) && -> ExpressionPtr;
+ [[nodiscard]] auto operator<(ExpressionPtr const& other) const -> bool;
+ [[nodiscard]] auto operator==(ExpressionPtr const& other) const -> bool;
+ template <class T>
+ [[nodiscard]] auto operator==(T const& other) const -> bool {
+ return ptr_ and *ptr_ == other;
+ }
+ template <class T>
+ [[nodiscard]] auto operator!=(T const& other) const -> bool {
+ return not(*this == other);
+ }
+ [[nodiscard]] auto Evaluate(
+ Configuration const& env,
+ FunctionMapPtr const& functions,
+ std::function<void(std::string const&)> const& logger =
+ [](std::string const& error) noexcept -> void {
+ Logger::Log(LogLevel::Error, error);
+ },
+ std::function<void(void)> const& note_user_context =
+ []() noexcept -> void {}) const noexcept -> ExpressionPtr;
+
+ [[nodiscard]] auto IsCacheable() const noexcept -> bool;
+ [[nodiscard]] auto ToIdentifier() const noexcept -> std::string;
+ [[nodiscard]] auto ToJson() const noexcept -> nlohmann::json;
+
+ using linked_map_t = LinkedMap<std::string, ExpressionPtr, ExpressionPtr>;
+ [[nodiscard]] auto IsNotNull() const noexcept -> bool;
+ [[nodiscard]] auto LinkedMap() const& -> linked_map_t const&;
+ [[nodiscard]] static auto Make(linked_map_t&& map) -> ExpressionPtr;
+
+ private:
+ std::shared_ptr<Expression> ptr_;
+};
+
+[[nodiscard]] auto operator+(ExpressionPtr const& lhs, ExpressionPtr const& rhs)
+ -> ExpressionPtr;
+
+namespace std {
+template <>
+struct hash<ExpressionPtr> {
+ [[nodiscard]] auto operator()(ExpressionPtr const& p) const noexcept
+ -> std::size_t;
+};
+} // namespace std
+
+#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_EXPRESSION_PTR_HPP
diff --git a/src/buildtool/build_engine/expression/function_map.hpp b/src/buildtool/build_engine/expression/function_map.hpp
new file mode 100644
index 00000000..967d5fe0
--- /dev/null
+++ b/src/buildtool/build_engine/expression/function_map.hpp
@@ -0,0 +1,23 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_FUNCTION_MAP_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_FUNCTION_MAP_HPP
+
+#include <functional>
+#include <string>
+
+#include "src/buildtool/build_engine/expression/linked_map.hpp"
+
+class ExpressionPtr;
+class Configuration;
+
+using SubExprEvaluator =
+ std::function<ExpressionPtr(ExpressionPtr const&, Configuration const&)>;
+
+using FunctionMap =
+ LinkedMap<std::string,
+ std::function<ExpressionPtr(SubExprEvaluator&&,
+ ExpressionPtr const&,
+ Configuration const&)>>;
+
+using FunctionMapPtr = FunctionMap::Ptr;
+
+#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_FUNCTION_MAP_HPP
diff --git a/src/buildtool/build_engine/expression/linked_map.hpp b/src/buildtool/build_engine/expression/linked_map.hpp
new file mode 100644
index 00000000..5c6da558
--- /dev/null
+++ b/src/buildtool/build_engine/expression/linked_map.hpp
@@ -0,0 +1,414 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_LINKED_MAP_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_LINKED_MAP_HPP
+
+#include <algorithm>
+#include <atomic>
+#include <condition_variable>
+#include <map>
+#include <memory>
+#include <mutex>
+#include <vector>
+
+#include "fmt/core.h"
+#include "src/utils/cpp/atomic.hpp"
+#include "src/utils/cpp/hash_combine.hpp"
+
+template <class K, class V, class NextPtr>
+class LinkedMap;
+
+// Default NextPtr for LinkedMap, based on std::shared_ptr.
+template <class K, class V>
+class LinkedMapPtr {
+ using ptr_t = LinkedMapPtr<K, V>;
+ using map_t = LinkedMap<K, V, ptr_t>;
+
+ public:
+ LinkedMapPtr() noexcept = default;
+ explicit LinkedMapPtr(std::shared_ptr<map_t> ptr) noexcept
+ : ptr_{std::move(ptr)} {}
+ explicit operator bool() const { return static_cast<bool>(ptr_); }
+ [[nodiscard]] auto operator*() const& -> map_t const& { return *ptr_; }
+ [[nodiscard]] auto operator->() const& -> map_t const* {
+ return ptr_.get();
+ }
+ [[nodiscard]] auto IsNotNull() const noexcept -> bool {
+ return static_cast<bool>(ptr_);
+ }
+ [[nodiscard]] auto LinkedMap() const& -> map_t const& { return *ptr_; }
+ [[nodiscard]] static auto Make(map_t&& map) -> ptr_t {
+ return ptr_t{std::make_shared<map_t>(std::move(map))};
+ }
+
+ private:
+ std::shared_ptr<map_t> ptr_{};
+};
+
+/// \brief Immutable LinkedMap.
+/// Uses smart pointers to build up a list of pointer-linked maps. The NextPtr
+/// that is used internally can be overloaded by any class implementing the
+/// following methods:
+/// 1. auto IsNotNull() const noexcept -> bool;
+/// 2. auto LinkedMap() const& -> LinkedMap<K, V, NextPtr> const&;
+/// 3. static auto Make(LinkedMap<K, V, NextPtr>&&) -> NextPtr;
+template <class K, class V, class NextPtr = LinkedMapPtr<K, V>>
+class LinkedMap {
+ using item_t = std::pair<K, V>;
+ using items_t = std::vector<item_t>;
+ using keys_t = std::vector<K>;
+ using values_t = std::vector<V>;
+
+ public:
+ using Ptr = NextPtr;
+ // When merging maps, we always rely on entries being traversed in key
+ // order; so keep the underlying map an ordered data structure.
+ using underlying_map_t = std::map<K, V>;
+
+ static constexpr auto MakePtr(underlying_map_t map) -> Ptr {
+ return Ptr::Make(LinkedMap<K, V, Ptr>{std::move(map)});
+ }
+
+ static constexpr auto MakePtr(item_t item) -> Ptr {
+ return Ptr::Make(LinkedMap<K, V, Ptr>{std::move(item)});
+ }
+
+ static constexpr auto MakePtr(K key, V value) -> Ptr {
+ return Ptr::Make(
+ LinkedMap<K, V, Ptr>{std::move(key), std::move(value)});
+ }
+
+ static constexpr auto MakePtr(Ptr next, Ptr content) -> Ptr {
+ return Ptr::Make(LinkedMap<K, V, Ptr>{next, content});
+ }
+
+ static constexpr auto MakePtr(Ptr next, underlying_map_t map) -> Ptr {
+ return Ptr::Make(LinkedMap<K, V, Ptr>{next, std::move(map)});
+ }
+
+ static constexpr auto MakePtr(Ptr const& next, item_t item) -> Ptr {
+ return Ptr::Make(LinkedMap<K, V, Ptr>{next, std::move(item)});
+ }
+
+ static constexpr auto MakePtr(Ptr const& next, K key, V value) -> Ptr {
+ return Ptr::Make(
+ LinkedMap<K, V, Ptr>{next, std::move(key), std::move(value)});
+ }
+
+ explicit LinkedMap(underlying_map_t map) noexcept : map_{std::move(map)} {}
+ explicit LinkedMap(item_t item) noexcept { map_.emplace(std::move(item)); }
+ LinkedMap(K key, V val) noexcept {
+ map_.emplace(std::move(key), std::move(val));
+ }
+ LinkedMap(Ptr next, Ptr content) noexcept
+ : next_{std::move(next)}, content_{std::move(content)} {}
+ LinkedMap(Ptr next, underlying_map_t map) noexcept
+ : next_{std::move(next)}, map_{std::move(map)} {}
+ LinkedMap(Ptr next, item_t item) noexcept : next_{std::move(next)} {
+ map_.emplace(std::move(item));
+ }
+ LinkedMap(Ptr next, K key, V val) noexcept : next_{std::move(next)} {
+ map_.emplace(std::move(key), std::move(val));
+ }
+
+ LinkedMap() noexcept = default;
+ LinkedMap(LinkedMap const& other) noexcept
+ : next_{other.next_},
+ content_{other.content_},
+ map_{other.map_},
+ items_{other.items_.load()} {}
+ LinkedMap(LinkedMap&& other) noexcept
+ : next_{std::move(other.next_)},
+ content_{std::move(other.content_)},
+ map_{std::move(other.map_)},
+ items_{other.items_.load()} {}
+ ~LinkedMap() noexcept = default;
+
+ auto operator=(LinkedMap const& other) noexcept -> LinkedMap& {
+ next_ = other.next_;
+ content_ = other.content_;
+ map_ = other.map_;
+ items_ = other.items_.load();
+ return *this;
+ }
+ auto operator=(LinkedMap&& other) noexcept -> LinkedMap& {
+ next_ = std::move(other.next_);
+ content_ = std::move(other.content_);
+ map_ = std::move(other.map_);
+ items_ = other.items_.load();
+ return *this;
+ }
+
+ [[nodiscard]] auto contains(K const& key) const noexcept -> bool {
+ return static_cast<bool>(Find(key));
+ }
+
+ [[nodiscard]] auto at(K const& key) const& -> V const& {
+ auto value = Find(key);
+ if (value) {
+ return value->get();
+ }
+ throw std::out_of_range{fmt::format("Missing key {}", key)};
+ }
+
+ [[nodiscard]] auto at(K const& key) && -> V {
+ auto value = Find(key);
+ if (value) {
+ return std::move(*value);
+ }
+ throw std::out_of_range{fmt::format("Missing key {}", key)};
+ }
+
+ [[nodiscard]] auto operator[](K const& key) const& -> V const& {
+ return at(key);
+ }
+
+ [[nodiscard]] auto empty() const noexcept -> bool {
+ return (content_.IsNotNull() ? content_.LinkedMap().empty()
+ : map_.empty()) and
+ (not next_.IsNotNull() or next_.LinkedMap().empty());
+ }
+
+ [[nodiscard]] auto Find(K const& key) const& noexcept
+ -> std::optional<std::reference_wrapper<V const>> {
+ if (content_.IsNotNull()) {
+ auto val = content_.LinkedMap().Find(key);
+ if (val) {
+ return val;
+ }
+ }
+ else {
+ auto it = map_.find(key);
+ if (it != map_.end()) {
+ return it->second;
+ }
+ }
+ if (next_.IsNotNull()) {
+ auto val = next_.LinkedMap().Find(key);
+ if (val) {
+ return val;
+ }
+ }
+ return std::nullopt;
+ }
+
+ [[nodiscard]] auto Find(K const& key) && noexcept -> std::optional<V> {
+ if (content_.IsNotNull()) {
+ auto val = content_.LinkedMap().Find(key);
+ if (val) {
+ return val->get();
+ }
+ }
+ else {
+ auto it = map_.find(key);
+ if (it != map_.end()) {
+ return std::move(it->second);
+ }
+ }
+ if (next_.IsNotNull()) {
+ auto val = next_.LinkedMap().Find(key);
+ if (val) {
+ return val->get();
+ }
+ }
+ return std::nullopt;
+ }
+
+ [[nodiscard]] auto FindConflictingDuplicate(LinkedMap const& other)
+ const& noexcept -> std::optional<std::reference_wrapper<K const>> {
+ auto const& my_items = Items();
+ auto const& other_items = other.Items();
+ // Search for duplicates, using that iteration over the items is
+ // orderd by keys.
+ auto me = my_items.begin();
+ auto they = other_items.begin();
+ while (me != my_items.end() and they != other_items.end()) {
+ if (me->first == they->first) {
+ if (not(me->second == they->second)) {
+ return me->first;
+ }
+ ++me;
+ ++they;
+ }
+ else if (me->first < they->first) {
+ ++me;
+ }
+ else {
+ ++they;
+ }
+ }
+ return std::nullopt;
+ }
+
+ [[nodiscard]] auto FindConflictingDuplicate(
+ LinkedMap const& other) && noexcept = delete;
+
+ // NOTE: Expensive, needs to compute sorted items.
+ [[nodiscard]] auto size() const noexcept -> std::size_t {
+ return Items().size();
+ }
+ // NOTE: Expensive, needs to compute sorted items.
+ [[nodiscard]] auto begin() const& -> typename items_t::const_iterator {
+ return Items().cbegin();
+ }
+ // NOTE: Expensive, needs to compute sorted items.
+ [[nodiscard]] auto end() const& -> typename items_t::const_iterator {
+ return Items().cend();
+ }
+ // NOTE: Expensive, needs to compute sorted items.
+ [[nodiscard]] auto cbegin() const& -> typename items_t::const_iterator {
+ return begin();
+ }
+ // NOTE: Expensive, needs to compute sorted items.
+ [[nodiscard]] auto cend() const& -> typename items_t::const_iterator {
+ return end();
+ }
+
+ [[nodiscard]] auto begin() && -> typename items_t::const_iterator = delete;
+ [[nodiscard]] auto end() && -> typename items_t::const_iterator = delete;
+ [[nodiscard]] auto cbegin() && -> typename items_t::const_iterator = delete;
+ [[nodiscard]] auto cend() && -> typename items_t::const_iterator = delete;
+
+ // NOTE: Expensive, needs to compute sorted items.
+ [[nodiscard]] auto operator==(
+ LinkedMap<K, V, NextPtr> const& other) const noexcept -> bool {
+ return this == &other or (this->empty() and other.empty()) or
+ this->Items() == other.Items();
+ }
+
+ // NOTE: Expensive, needs to compute sorted items.
+ [[nodiscard]] auto Items() const& -> items_t const& {
+ if (items_.load() == nullptr) {
+ if (not items_loading_.exchange(true)) {
+ items_ = std::make_shared<items_t>(ComputeSortedItems());
+ items_.notify_all();
+ }
+ else {
+ items_.wait(nullptr);
+ }
+ }
+ return *items_.load();
+ }
+
+ // NOTE: Expensive, needs to compute sorted items.
+ [[nodiscard]] auto Items() && -> items_t {
+ return items_.load() == nullptr ? ComputeSortedItems()
+ : std::move(*items_.load());
+ }
+
+ // NOTE: Expensive, needs to compute sorted items.
+ [[nodiscard]] auto Keys() const -> keys_t {
+ auto keys = keys_t{};
+ auto const& items = Items();
+ keys.reserve(items.size());
+ std::transform(items.begin(),
+ items.end(),
+ std::back_inserter(keys),
+ [](auto const& item) { return item.first; });
+ return keys;
+ }
+
+ // NOTE: Expensive, needs to compute sorted items.
+ [[nodiscard]] auto Values() const -> values_t {
+ auto values = values_t{};
+ auto const& items = Items();
+ values.reserve(items.size());
+ std::transform(items.begin(),
+ items.end(),
+ std::back_inserter(values),
+ [](auto const& item) { return item.second; });
+ return values;
+ }
+
+ private:
+ Ptr next_{}; // map that is shadowed by this map
+ Ptr content_{}; // content of this map if set
+ underlying_map_t map_{}; // content of this map if content_ is not set
+
+ mutable atomic_shared_ptr<items_t> items_{};
+ mutable std::atomic<bool> items_loading_{};
+
+ [[nodiscard]] auto ComputeSortedItems() const noexcept -> items_t {
+ auto size =
+ content_.IsNotNull() ? content_.LinkedMap().size() : map_.size();
+ if (next_.IsNotNull()) {
+ size += next_.LinkedMap().size();
+ }
+
+ auto items = items_t{};
+ items.reserve(size);
+
+ auto empty = items_t{};
+ auto map_copy = items_t{};
+ typename items_t::const_iterator citemsit;
+ typename items_t::const_iterator citemsend;
+ typename items_t::const_iterator nitemsit;
+ typename items_t::const_iterator nitemsend;
+
+ if (content_.IsNotNull()) {
+ auto const& citems = content_.LinkedMap().Items();
+ citemsit = citems.begin();
+ citemsend = citems.end();
+ }
+ else {
+ map_copy.reserve(map_.size());
+ map_copy.insert(map_copy.end(), map_.begin(), map_.end());
+ citemsit = map_copy.begin();
+ citemsend = map_copy.end();
+ }
+ if (next_.IsNotNull()) {
+ auto const& nitems = next_.LinkedMap().Items();
+ nitemsit = nitems.begin();
+ nitemsend = nitems.end();
+ }
+ else {
+ nitemsit = empty.begin();
+ nitemsend = empty.end();
+ }
+
+ while (citemsit != citemsend and nitemsit != nitemsend) {
+ if (citemsit->first == nitemsit->first) {
+ items.push_back(*citemsit);
+ ++citemsit;
+ ++nitemsit;
+ }
+ else if (citemsit->first < nitemsit->first) {
+ items.push_back(*citemsit);
+ ++citemsit;
+ }
+ else {
+ items.push_back(*nitemsit);
+ ++nitemsit;
+ }
+ }
+
+ // No more comaprisons to be made; copy over the remaining
+ // entries
+ items.insert(items.end(), citemsit, citemsend);
+ items.insert(items.end(), nitemsit, nitemsend);
+
+ return items;
+ }
+};
+
+namespace std {
+template <class K, class V, class N>
+struct hash<LinkedMap<K, V, N>> {
+ [[nodiscard]] auto operator()(LinkedMap<K, V, N> const& m) const noexcept
+ -> std::size_t {
+ size_t seed{};
+ for (auto const& e : m) {
+ hash_combine(&seed, e.first);
+ hash_combine(&seed, e.second);
+ }
+ return seed;
+ }
+};
+template <class K, class V>
+struct hash<LinkedMapPtr<K, V>> {
+ [[nodiscard]] auto operator()(LinkedMapPtr<K, V> const& p) const noexcept
+ -> std::size_t {
+ return std::hash<std::remove_cvref_t<decltype(*p)>>{}(*p);
+ }
+};
+} // namespace std
+
+#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_EXPRESSION_LINKED_MAP_HPP
diff --git a/src/buildtool/build_engine/expression/target_node.cpp b/src/buildtool/build_engine/expression/target_node.cpp
new file mode 100644
index 00000000..03fc47f5
--- /dev/null
+++ b/src/buildtool/build_engine/expression/target_node.cpp
@@ -0,0 +1,20 @@
+#include "src/buildtool/build_engine/expression/target_node.hpp"
+
+#include "src/buildtool/build_engine/expression/expression.hpp"
+
+auto TargetNode::Abstract::IsCacheable() const noexcept -> bool {
+ return target_fields->IsCacheable();
+}
+
+auto TargetNode::ToJson() const -> nlohmann::json {
+ if (IsValue()) {
+ return {{"type", "VALUE_NODE"}, {"result", GetValue()->ToJson()}};
+ }
+ auto const& data = GetAbstract();
+ return {{"type", "ABSTRACT_NODE"},
+ {"node_type", data.node_type},
+ {"string_fields", data.string_fields->ToJson()},
+ {"target_fields",
+ data.target_fields->ToJson(
+ Expression::JsonMode::SerializeAllButNodes)}};
+}
diff --git a/src/buildtool/build_engine/expression/target_node.hpp b/src/buildtool/build_engine/expression/target_node.hpp
new file mode 100644
index 00000000..a2ab9c83
--- /dev/null
+++ b/src/buildtool/build_engine/expression/target_node.hpp
@@ -0,0 +1,83 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILDENGINE_EXPRESSION_TARGET_NODE_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILDENGINE_EXPRESSION_TARGET_NODE_HPP
+
+#include <type_traits>
+#include <variant>
+
+#include "src/buildtool/build_engine/expression/expression_ptr.hpp"
+#include "src/buildtool/build_engine/expression/target_result.hpp"
+
+class TargetNode {
+ using Value = ExpressionPtr; // store result type
+
+ public:
+ struct Abstract {
+ std::string node_type; // arbitrary string that maps to rule
+ ExpressionPtr string_fields; // map to list of strings
+ ExpressionPtr target_fields; // map to list of targets
+ [[nodiscard]] auto IsCacheable() const noexcept -> bool;
+ };
+
+ template <class NodeType>
+ requires(
+ std::is_same_v<NodeType, Value> or
+ std::is_same_v<NodeType, Abstract>) explicit TargetNode(NodeType node)
+ : data_{std::move(node)},
+ is_cacheable_{std::get<NodeType>(data_).IsCacheable()} {}
+
+ [[nodiscard]] auto IsCacheable() const noexcept -> bool {
+ return is_cacheable_;
+ }
+
+ [[nodiscard]] auto IsValue() const noexcept {
+ return std::holds_alternative<Value>(data_);
+ }
+
+ [[nodiscard]] auto IsAbstract() const noexcept {
+ return std::holds_alternative<Abstract>(data_);
+ }
+
+ [[nodiscard]] auto GetValue() const -> Value const& {
+ return std::get<Value>(data_);
+ }
+
+ [[nodiscard]] auto GetAbstract() const -> Abstract const& {
+ return std::get<Abstract>(data_);
+ }
+
+ [[nodiscard]] auto operator==(TargetNode const& other) const noexcept
+ -> bool {
+ if (data_.index() == other.data_.index()) {
+ try {
+ if (IsValue()) {
+ return GetValue() == other.GetValue();
+ }
+ auto const& abs_l = GetAbstract();
+ auto const& abs_r = other.GetAbstract();
+ return abs_l.node_type == abs_r.node_type and
+ abs_l.string_fields == abs_r.string_fields and
+ abs_l.target_fields == abs_r.string_fields;
+ } catch (...) {
+ // should never happen
+ }
+ }
+ return false;
+ }
+
+ [[nodiscard]] auto ToString() const noexcept -> std::string {
+ try {
+ return ToJson().dump();
+ } catch (...) {
+ // should never happen
+ }
+ return {};
+ }
+
+ [[nodiscard]] auto ToJson() const -> nlohmann::json;
+
+ private:
+ std::variant<Value, Abstract> data_;
+ bool is_cacheable_;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_BUILDENGINE_EXPRESSION_TARGET_NODE_HPP
diff --git a/src/buildtool/build_engine/expression/target_result.hpp b/src/buildtool/build_engine/expression/target_result.hpp
new file mode 100644
index 00000000..325d52fd
--- /dev/null
+++ b/src/buildtool/build_engine/expression/target_result.hpp
@@ -0,0 +1,33 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILDENGINE_EXPRESSION_TARGET_RESULT_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILDENGINE_EXPRESSION_TARGET_RESULT_HPP
+
+#include "src/buildtool/build_engine/expression/expression_ptr.hpp"
+#include "src/utils/cpp/hash_combine.hpp"
+
+struct TargetResult {
+ ExpressionPtr artifact_stage{};
+ ExpressionPtr provides{};
+ ExpressionPtr runfiles{};
+ bool is_cacheable{provides.IsCacheable()};
+
+ [[nodiscard]] auto operator==(TargetResult const& other) const noexcept
+ -> bool {
+ return artifact_stage == other.artifact_stage and
+ provides == other.provides and runfiles == other.runfiles;
+ }
+};
+
+namespace std {
+template <>
+struct std::hash<TargetResult> {
+ [[nodiscard]] auto operator()(TargetResult const& r) noexcept
+ -> std::size_t {
+ auto seed = std::hash<ExpressionPtr>{}(r.artifact_stage);
+ hash_combine(&seed, std::hash<ExpressionPtr>{}(r.provides));
+ hash_combine(&seed, std::hash<ExpressionPtr>{}(r.runfiles));
+ return seed;
+ }
+};
+} // namespace std
+
+#endif // INCLUDED_SRC_BUILDTOOL_BUILDENGINE_EXPRESSION_TARGET_RESULT_HPP
diff --git a/src/buildtool/build_engine/target_map/TARGETS b/src/buildtool/build_engine/target_map/TARGETS
new file mode 100644
index 00000000..71c9dd78
--- /dev/null
+++ b/src/buildtool/build_engine/target_map/TARGETS
@@ -0,0 +1,50 @@
+{ "configured_target":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["configured_target"]
+ , "hdrs": ["configured_target.hpp"]
+ , "deps":
+ [ ["@", "fmt", "", "fmt"]
+ , ["src/buildtool/build_engine/base_maps", "entity_name"]
+ , ["src/buildtool/build_engine/expression", "expression"]
+ , ["src/utils/cpp", "hash_combine"]
+ ]
+ , "stage": ["src", "buildtool", "build_engine", "target_map"]
+ }
+, "result_map":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["result_map"]
+ , "hdrs": ["result_map.hpp"]
+ , "deps":
+ [ ["src/buildtool/common", "tree"]
+ , ["src/buildtool/build_engine/analysed_target", "target"]
+ , ["src/buildtool/build_engine/target_map", "configured_target"]
+ , ["src/buildtool/build_engine/expression", "expression"]
+ , ["src/buildtool/multithreading", "task"]
+ , ["src/buildtool/multithreading", "task_system"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ ]
+ , "stage": ["src", "buildtool", "build_engine", "target_map"]
+ }
+, "target_map":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["target_map"]
+ , "hdrs": ["target_map.hpp"]
+ , "srcs": ["utils.cpp", "built_in_rules.cpp", "export.cpp", "target_map.cpp"]
+ , "private-hdrs": ["built_in_rules.hpp", "export.hpp", "utils.hpp"]
+ , "deps":
+ [ "configured_target"
+ , "result_map"
+ , ["src/buildtool/build_engine/analysed_target", "target"]
+ , ["src/buildtool/build_engine/base_maps", "entity_name"]
+ , ["src/buildtool/build_engine/base_maps", "field_reader"]
+ , ["src/buildtool/build_engine/base_maps", "rule_map"]
+ , ["src/buildtool/build_engine/base_maps", "source_map"]
+ , ["src/buildtool/build_engine/base_maps", "targets_file_map"]
+ , ["src/buildtool/build_engine/expression", "expression"]
+ , ["src/buildtool/multithreading", "async_map_consumer"]
+ , ["src/utils/cpp", "hash_combine"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ ]
+ , "stage": ["src", "buildtool", "build_engine", "target_map"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/build_engine/target_map/built_in_rules.cpp b/src/buildtool/build_engine/target_map/built_in_rules.cpp
new file mode 100644
index 00000000..b98484b6
--- /dev/null
+++ b/src/buildtool/build_engine/target_map/built_in_rules.cpp
@@ -0,0 +1,857 @@
+#include "src/buildtool/build_engine/target_map/built_in_rules.hpp"
+
+#include <algorithm>
+#include <filesystem>
+#include <functional>
+#include <memory>
+#include <sstream>
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+
+#include "src/buildtool/build_engine/base_maps/field_reader.hpp"
+#include "src/buildtool/build_engine/expression/expression_ptr.hpp"
+#include "src/buildtool/build_engine/target_map/export.hpp"
+#include "src/buildtool/build_engine/target_map/utils.hpp"
+
+namespace {
+
+auto genericRuleFields = std::unordered_set<std::string>{"arguments_config",
+ "cmds",
+ "deps",
+ "env",
+ "tainted",
+ "type",
+ "outs"};
+
+auto fileGenRuleFields = std::unordered_set<std::string>{"arguments_config",
+ "data",
+ "deps",
+ "name",
+ "tainted",
+ "type"};
+
+auto installRuleFields = std::unordered_set<std::string>{"arguments_config",
+ "deps",
+ "dirs",
+ "files",
+ "tainted",
+ "type"};
+
+void FileGenRuleWithDeps(
+ const std::vector<BuildMaps::Target::ConfiguredTarget>& dependency_keys,
+ const std::vector<AnalysedTargetPtr const*>& dependency_values,
+ const BuildMaps::Base::FieldReader::Ptr& desc,
+ const BuildMaps::Target::ConfiguredTarget& key,
+ const BuildMaps::Target::TargetMap::SetterPtr& setter,
+ const BuildMaps::Target::TargetMap::LoggerPtr& logger,
+ const gsl::not_null<BuildMaps::Target::ResultTargetMap*>& result_map) {
+ // Associate keys and values
+ std::unordered_map<BuildMaps::Target::ConfiguredTarget, AnalysedTargetPtr>
+ deps_by_transition;
+ deps_by_transition.reserve(dependency_keys.size());
+ for (size_t i = 0; i < dependency_keys.size(); ++i) {
+ deps_by_transition.emplace(dependency_keys[i], *dependency_values[i]);
+ }
+
+ auto param_vars = desc->ReadStringList("arguments_config");
+ if (not param_vars) {
+ return;
+ }
+ auto param_config = key.config.Prune(*param_vars);
+
+ auto string_fields_fcts =
+ FunctionMap::MakePtr(FunctionMap::underlying_map_t{
+ {"outs",
+ [&deps_by_transition, &key](
+ auto&& eval, auto const& expr, auto const& env) {
+ return BuildMaps::Target::Utils::keys_expr(
+ BuildMaps::Target::Utils::obtainTargetByName(
+ eval, expr, env, key.target, deps_by_transition)
+ ->Artifacts());
+ }},
+ {"runfiles",
+ [&deps_by_transition, &key](
+ auto&& eval, auto const& expr, auto const& env) {
+ return BuildMaps::Target::Utils::keys_expr(
+ BuildMaps::Target::Utils::obtainTargetByName(
+ eval, expr, env, key.target, deps_by_transition)
+ ->RunFiles());
+ }}});
+
+ auto tainted = std::set<std::string>{};
+ auto got_tainted = BuildMaps::Target::Utils::getTainted(
+ &tainted,
+ param_config,
+ desc->ReadOptionalExpression("tainted", Expression::kEmptyList),
+ logger);
+ if (not got_tainted) {
+ return;
+ }
+ for (auto const& dep : dependency_values) {
+ if (not std::includes(tainted.begin(),
+ tainted.end(),
+ (*dep)->Tainted().begin(),
+ (*dep)->Tainted().end())) {
+ (*logger)(
+ "Not tainted with all strings the dependencies are tainted "
+ "with",
+ true);
+ return;
+ }
+ }
+
+ auto file_name_exp = desc->ReadOptionalExpression(
+ "name", ExpressionPtr{std::string{"out.txt"}});
+ if (not file_name_exp) {
+ return;
+ }
+ auto file_name_val = file_name_exp.Evaluate(
+ param_config, string_fields_fcts, [logger](auto const& msg) {
+ (*logger)(fmt::format("While evaluating name:\n{}", msg), true);
+ });
+ if (not file_name_val) {
+ return;
+ }
+ if (not file_name_val->IsString()) {
+ (*logger)(fmt::format("name should evaluate to a string, but got {}",
+ file_name_val->ToString()),
+ true);
+ return;
+ }
+ auto data_exp =
+ desc->ReadOptionalExpression("data", ExpressionPtr{std::string{""}});
+ if (not data_exp) {
+ return;
+ }
+ auto data_val = data_exp.Evaluate(
+ param_config, string_fields_fcts, [logger](auto const& msg) {
+ (*logger)(fmt::format("While evaluating data:\n{}", msg), true);
+ });
+ if (not data_val) {
+ return;
+ }
+ if (not data_val->IsString()) {
+ (*logger)(fmt::format("data should evaluate to a string, but got {}",
+ data_val->ToString()),
+ true);
+ return;
+ }
+ auto stage = ExpressionPtr{Expression::map_t{
+ file_name_val->String(),
+ ExpressionPtr{ArtifactDescription{
+ {ComputeHash(data_val->String()), data_val->String().size()},
+ ObjectType::File}}}};
+
+ auto vars_set = std::unordered_set<std::string>{};
+ vars_set.insert(param_vars->begin(), param_vars->end());
+ auto analysis_result = std::make_shared<AnalysedTarget>(
+ TargetResult{stage, ExpressionPtr{Expression::map_t{}}, stage},
+ std::vector<ActionDescription>{},
+ std::vector<std::string>{data_val->String()},
+ std::vector<Tree>{},
+ std::move(vars_set),
+ std::move(tainted));
+ analysis_result =
+ result_map->Add(key.target, param_config, std::move(analysis_result));
+ (*setter)(std::move(analysis_result));
+}
+
+void FileGenRule(
+ const nlohmann::json& desc_json,
+ const BuildMaps::Target::ConfiguredTarget& key,
+ const BuildMaps::Target::TargetMap::SubCallerPtr& subcaller,
+ const BuildMaps::Target::TargetMap::SetterPtr& setter,
+ const BuildMaps::Target::TargetMap::LoggerPtr& logger,
+ const gsl::not_null<BuildMaps::Target::ResultTargetMap*>& result_map) {
+ auto desc = BuildMaps::Base::FieldReader::CreatePtr(
+ desc_json, key.target, "file-generation target", logger);
+ desc->ExpectFields(fileGenRuleFields);
+ auto param_vars = desc->ReadStringList("arguments_config");
+ if (not param_vars) {
+ return;
+ }
+ auto param_config = key.config.Prune(*param_vars);
+
+ // Collect dependencies: deps
+ auto const& empty_list = Expression::kEmptyList;
+ auto deps_exp = desc->ReadOptionalExpression("deps", empty_list);
+ if (not deps_exp) {
+ return;
+ }
+ auto deps_value =
+ deps_exp.Evaluate(param_config, {}, [&logger](auto const& msg) {
+ (*logger)(fmt::format("While evaluating deps:\n{}", msg), true);
+ });
+ if (not deps_value) {
+ return;
+ }
+ if (not deps_value->IsList()) {
+ (*logger)(fmt::format("Expected deps to evaluate to a list of targets, "
+ "but found {}",
+ deps_value->ToString()),
+ true);
+ return;
+ }
+ std::vector<BuildMaps::Target::ConfiguredTarget> dependency_keys;
+ for (auto const& dep_name : deps_value->List()) {
+ auto dep_target = BuildMaps::Base::ParseEntityNameFromExpression(
+ dep_name,
+ key.target,
+ [&logger, &dep_name](std::string const& parse_err) {
+ (*logger)(fmt::format("Parsing dep entry {} failed with:\n{}",
+ dep_name->ToString(),
+ parse_err),
+ true);
+ });
+ if (not dep_target) {
+ return;
+ }
+ dependency_keys.emplace_back(
+ BuildMaps::Target::ConfiguredTarget{*dep_target, key.config});
+ }
+ (*subcaller)(
+ dependency_keys,
+ [dependency_keys, desc, setter, logger, key, result_map](
+ auto const& values) {
+ FileGenRuleWithDeps(
+ dependency_keys, values, desc, key, setter, logger, result_map);
+ },
+ logger);
+}
+
+void InstallRuleWithDeps(
+ const std::vector<BuildMaps::Target::ConfiguredTarget>& dependency_keys,
+ const std::vector<AnalysedTargetPtr const*>& dependency_values,
+ const BuildMaps::Base::FieldReader::Ptr& desc,
+ const BuildMaps::Target::ConfiguredTarget& key,
+ const std::vector<BuildMaps::Base::EntityName>& deps,
+ const std::unordered_map<std::string, BuildMaps::Base::EntityName>& files,
+ const std::vector<std::pair<BuildMaps::Base::EntityName, std::string>>&
+ dirs,
+ const BuildMaps::Target::TargetMap::SetterPtr& setter,
+ const BuildMaps::Target::TargetMap::LoggerPtr& logger,
+ const gsl::not_null<BuildMaps::Target::ResultTargetMap*>& result_map) {
+ // Associate keys and values
+ std::unordered_map<BuildMaps::Base::EntityName, AnalysedTargetPtr>
+ deps_by_target;
+ deps_by_target.reserve(dependency_keys.size());
+ for (size_t i = 0; i < dependency_keys.size(); ++i) {
+ deps_by_target.emplace(dependency_keys[i].target,
+ *dependency_values[i]);
+ }
+
+ // Compute the effective dependecy on config variables
+ std::unordered_set<std::string> effective_vars;
+ auto param_vars = desc->ReadStringList("arguments_config");
+ effective_vars.insert(param_vars->begin(), param_vars->end());
+ for (auto const& [target_name, target] : deps_by_target) {
+ effective_vars.insert(target->Vars().begin(), target->Vars().end());
+ }
+ auto effective_conf = key.config.Prune(effective_vars);
+
+ // Compute and verify taintedness
+ auto tainted = std::set<std::string>{};
+ auto got_tainted = BuildMaps::Target::Utils::getTainted(
+ &tainted,
+ key.config.Prune(*param_vars),
+ desc->ReadOptionalExpression("tainted", Expression::kEmptyList),
+ logger);
+ if (not got_tainted) {
+ return;
+ }
+ for (auto const& dep : dependency_values) {
+ if (not std::includes(tainted.begin(),
+ tainted.end(),
+ (*dep)->Tainted().begin(),
+ (*dep)->Tainted().end())) {
+ (*logger)(
+ "Not tainted with all strings the dependencies are tainted "
+ "with",
+ true);
+ return;
+ }
+ }
+
+ // Stage deps (runfiles only)
+ auto stage = ExpressionPtr{Expression::map_t{}};
+ for (auto const& dep : deps) {
+ auto to_stage = deps_by_target.at(dep)->RunFiles();
+ auto dup = stage->Map().FindConflictingDuplicate(to_stage->Map());
+ if (dup) {
+ (*logger)(fmt::format("Staging conflict for path {}", dup->get()),
+ true);
+ return;
+ }
+ stage = ExpressionPtr{Expression::map_t{stage, to_stage}};
+ }
+
+ // stage files (artifacts, but fall back to runfiles)
+ auto files_stage = Expression::map_t::underlying_map_t{};
+ for (auto const& [path, target] : files) {
+ if (stage->Map().contains(path)) {
+ (*logger)(fmt::format("Staging conflict for path {}", path), true);
+ return;
+ }
+ auto artifacts = deps_by_target[target]->Artifacts();
+ if (artifacts->Map().empty()) {
+ // If no artifacts are present, fall back to runfiles
+ artifacts = deps_by_target[target]->RunFiles();
+ }
+ if (artifacts->Map().empty()) {
+ (*logger)(fmt::format(
+ "No artifacts or runfiles for {} to be staged to {}",
+ target.ToString(),
+ path),
+ true);
+ return;
+ }
+ if (artifacts->Map().size() != 1) {
+ (*logger)(
+ fmt::format("Not precisely one entry for {} to be staged to {}",
+ target.ToString(),
+ path),
+ true);
+ return;
+ }
+ files_stage.emplace(path, artifacts->Map().Values()[0]);
+ }
+ stage = ExpressionPtr{Expression::map_t{stage, files_stage}};
+
+ // stage dirs (artifacts and runfiles)
+ for (auto const& subdir : dirs) {
+ auto subdir_stage = Expression::map_t::underlying_map_t{};
+ auto dir_path = std::filesystem::path{subdir.second};
+ auto target = deps_by_target.at(subdir.first);
+ // within a target, artifacts and runfiles may overlap, but artifacts
+ // take perference
+ for (auto const& [path, artifact] : target->Artifacts()->Map()) {
+ subdir_stage.emplace((dir_path / path).string(), artifact);
+ }
+ for (auto const& [path, artifact] : target->RunFiles()->Map()) {
+ subdir_stage.emplace((dir_path / path).string(), artifact);
+ }
+ auto to_stage = ExpressionPtr{Expression::map_t{subdir_stage}};
+ auto dup = stage->Map().FindConflictingDuplicate(to_stage->Map());
+ if (dup) {
+ (*logger)(fmt::format("Staging conflict for path {}", dup->get()),
+ true);
+ return;
+ }
+ stage = ExpressionPtr{Expression::map_t{stage, to_stage}};
+ }
+
+ auto const& empty_map = Expression::kEmptyMap;
+ auto result =
+ std::make_shared<AnalysedTarget>(TargetResult{stage, empty_map, stage},
+ std::vector<ActionDescription>{},
+ std::vector<std::string>{},
+ std::vector<Tree>{},
+ std::move(effective_vars),
+ std::move(tainted));
+
+ result = result_map->Add(key.target, effective_conf, std::move(result));
+ (*setter)(std::move(result));
+}
+
+void InstallRule(
+ const nlohmann::json& desc_json,
+ const BuildMaps::Target::ConfiguredTarget& key,
+ const BuildMaps::Target::TargetMap::SubCallerPtr& subcaller,
+ const BuildMaps::Target::TargetMap::SetterPtr& setter,
+ const BuildMaps::Target::TargetMap::LoggerPtr& logger,
+ const gsl::not_null<BuildMaps::Target::ResultTargetMap*>& result_map) {
+ auto desc = BuildMaps::Base::FieldReader::CreatePtr(
+ desc_json, key.target, "install target", logger);
+ desc->ExpectFields(installRuleFields);
+ auto param_vars = desc->ReadStringList("arguments_config");
+ if (not param_vars) {
+ return;
+ }
+ auto param_config = key.config.Prune(*param_vars);
+
+ // Collect dependencies: deps
+ auto const& empty_list = Expression::kEmptyList;
+ auto deps_exp = desc->ReadOptionalExpression("deps", empty_list);
+ if (not deps_exp) {
+ return;
+ }
+ auto deps_value =
+ deps_exp.Evaluate(param_config, {}, [&logger](auto const& msg) {
+ (*logger)(fmt::format("While evaluating deps:\n{}", msg), true);
+ });
+ if (not deps_value) {
+ return;
+ }
+ if (not deps_value->IsList()) {
+ (*logger)(fmt::format("Expected deps to evaluate to a list of targets, "
+ "but found {}",
+ deps_value->ToString()),
+ true);
+ return;
+ }
+ std::vector<BuildMaps::Target::ConfiguredTarget> dependency_keys;
+ std::vector<BuildMaps::Base::EntityName> deps;
+ deps.reserve(deps_value->List().size());
+ for (auto const& dep_name : deps_value->List()) {
+ auto dep_target = BuildMaps::Base::ParseEntityNameFromExpression(
+ dep_name,
+ key.target,
+ [&logger, &dep_name](std::string const& parse_err) {
+ (*logger)(fmt::format("Parsing dep entry {} failed with:\n{}",
+ dep_name->ToString(),
+ parse_err),
+ true);
+ });
+ if (not dep_target) {
+ return;
+ }
+ dependency_keys.emplace_back(
+ BuildMaps::Target::ConfiguredTarget{*dep_target, key.config});
+ deps.emplace_back(*dep_target);
+ }
+
+ // Collect dependencies: files
+ auto const& empty_map = Expression::kEmptyMap;
+ auto files_exp = desc->ReadOptionalExpression("files", empty_map);
+ if (not files_exp) {
+ return;
+ }
+ if (not files_exp->IsMap()) {
+ (*logger)(fmt::format("Expected files to be a map of target "
+ "expressions, but found {}",
+ files_exp->ToString()),
+ true);
+ return;
+ }
+ auto files = std::unordered_map<std::string, BuildMaps::Base::EntityName>{};
+ files.reserve(files_exp->Map().size());
+ for (auto const& [path, dep_exp] : files_exp->Map()) {
+ std::string path_ = path; // Have a variable to capture
+ auto dep_name = dep_exp.Evaluate(
+ param_config, {}, [&logger, &path_](auto const& msg) {
+ (*logger)(
+ fmt::format(
+ "While evaluating files entry for {}:\n{}", path_, msg),
+ true);
+ });
+ if (not dep_name) {
+ return;
+ }
+ auto dep_target = BuildMaps::Base::ParseEntityNameFromExpression(
+ dep_name,
+ key.target,
+ [&logger, &dep_name, &path = path](std::string const& parse_err) {
+ (*logger)(fmt::format("Parsing file entry {} for key {} failed "
+ "with:\n{}",
+ dep_name->ToString(),
+ path,
+ parse_err),
+ true);
+ });
+ if (not dep_target) {
+ return;
+ }
+ dependency_keys.emplace_back(
+ BuildMaps::Target::ConfiguredTarget{*dep_target, key.config});
+ files.emplace(path, *dep_target);
+ }
+
+ // Collect dependencies: dirs
+ auto dirs_exp = desc->ReadOptionalExpression("dirs", empty_list);
+ if (not dirs_exp) {
+ return;
+ }
+ auto dirs_value =
+ dirs_exp.Evaluate(param_config, {}, [&logger](auto const& msg) {
+ (*logger)(fmt::format("While evaluating deps:\n{}", msg), true);
+ });
+ if (not dirs_value) {
+ return;
+ }
+ if (not dirs_value->IsList()) {
+ (*logger)(fmt::format("Expected dirs to evaluate to a list of "
+ "path-target pairs, but found {}",
+ dirs_value->ToString()),
+ true);
+ return;
+ }
+ auto dirs =
+ std::vector<std::pair<BuildMaps::Base::EntityName, std::string>>{};
+ dirs.reserve(dirs_value->List().size());
+ for (auto const& entry : dirs_value->List()) {
+ if (not(entry->IsList() and entry->List().size() == 2 and
+ entry->List()[1]->IsString())) {
+ (*logger)(fmt::format("Expected dirs to evaluate to a list of "
+ "target-path pairs, but found entry {}",
+ entry->ToString()),
+ true);
+ return;
+ }
+ auto dep_target = BuildMaps::Base::ParseEntityNameFromExpression(
+ entry->List()[0],
+ key.target,
+ [&logger, &entry](std::string const& parse_err) {
+ (*logger)(fmt::format("Parsing dir entry {} for path {} failed "
+ "with:\n{}",
+ entry->List()[0]->ToString(),
+ entry->List()[1]->String(),
+ parse_err),
+ true);
+ });
+ if (not dep_target) {
+ return;
+ }
+ dependency_keys.emplace_back(
+ BuildMaps::Target::ConfiguredTarget{*dep_target, key.config});
+ dirs.emplace_back(std::pair<BuildMaps::Base::EntityName, std::string>{
+ *dep_target, entry->List()[1]->String()});
+ }
+
+ (*subcaller)(
+ dependency_keys,
+ [dependency_keys,
+ deps = std::move(deps),
+ files = std::move(files),
+ dirs = std::move(dirs),
+ desc,
+ setter,
+ logger,
+ key,
+ result_map](auto const& values) {
+ InstallRuleWithDeps(dependency_keys,
+ values,
+ desc,
+ key,
+ deps,
+ files,
+ dirs,
+ setter,
+ logger,
+ result_map);
+ },
+ logger);
+}
+
+void GenericRuleWithDeps(
+ const std::vector<BuildMaps::Target::ConfiguredTarget>& transition_keys,
+ const std::vector<AnalysedTargetPtr const*>& dependency_values,
+ const BuildMaps::Base::FieldReader::Ptr& desc,
+ const BuildMaps::Target::ConfiguredTarget& key,
+ const BuildMaps::Target::TargetMap::SetterPtr& setter,
+ const BuildMaps::Target::TargetMap::LoggerPtr& logger,
+ const gsl::not_null<BuildMaps::Target::ResultTargetMap*>& result_map) {
+ // Associate dependency keys with values
+ std::unordered_map<BuildMaps::Target::ConfiguredTarget, AnalysedTargetPtr>
+ deps_by_transition;
+ deps_by_transition.reserve(transition_keys.size());
+ for (size_t i = 0; i < transition_keys.size(); ++i) {
+ deps_by_transition.emplace(transition_keys[i], *dependency_values[i]);
+ }
+
+ // Compute the effective dependecy on config variables
+ std::unordered_set<std::string> effective_vars;
+ auto param_vars = desc->ReadStringList("arguments_config");
+ effective_vars.insert(param_vars->begin(), param_vars->end());
+ for (auto const& [transition, target] : deps_by_transition) {
+ effective_vars.insert(target->Vars().begin(), target->Vars().end());
+ }
+ auto effective_conf = key.config.Prune(effective_vars);
+
+ // Compute and verify taintedness
+ auto tainted = std::set<std::string>{};
+ auto got_tainted = BuildMaps::Target::Utils::getTainted(
+ &tainted,
+ key.config.Prune(*param_vars),
+ desc->ReadOptionalExpression("tainted", Expression::kEmptyList),
+ logger);
+ if (not got_tainted) {
+ return;
+ }
+ for (auto const& dep : dependency_values) {
+ if (not std::includes(tainted.begin(),
+ tainted.end(),
+ (*dep)->Tainted().begin(),
+ (*dep)->Tainted().end())) {
+ (*logger)(
+ "Not tainted with all strings the dependencies are tainted "
+ "with",
+ true);
+ return;
+ }
+ }
+
+ // Evaluate cmd, outs, env
+ auto string_fields_fcts =
+ FunctionMap::MakePtr(FunctionMap::underlying_map_t{
+ {"outs",
+ [&deps_by_transition, &key](
+ auto&& eval, auto const& expr, auto const& env) {
+ return BuildMaps::Target::Utils::keys_expr(
+ BuildMaps::Target::Utils::obtainTargetByName(
+ eval, expr, env, key.target, deps_by_transition)
+ ->Artifacts());
+ }},
+ {"runfiles",
+ [&deps_by_transition, &key](
+ auto&& eval, auto const& expr, auto const& env) {
+ return BuildMaps::Target::Utils::keys_expr(
+ BuildMaps::Target::Utils::obtainTargetByName(
+ eval, expr, env, key.target, deps_by_transition)
+ ->RunFiles());
+ }}});
+ auto const& empty_list = Expression::kEmptyList;
+ auto param_config = key.config.Prune(*param_vars);
+ auto outs_exp = desc->ReadOptionalExpression("outs", empty_list);
+ if (not outs_exp) {
+ return;
+ }
+ auto outs_value = outs_exp.Evaluate(
+ param_config, string_fields_fcts, [&logger](auto const& msg) {
+ (*logger)(fmt::format("While evaluating outs:\n{}", msg), true);
+ });
+ if (not outs_value) {
+ return;
+ }
+ if ((not outs_value->IsList()) or outs_value->List().empty()) {
+ (*logger)(fmt::format("outs has to evaluate to a non-empty list of "
+ "strings, but found {}",
+ outs_value->ToString()),
+ true);
+ return;
+ }
+ std::vector<std::string> outs{};
+ outs.reserve(outs_value->List().size());
+ for (auto const& x : outs_value->List()) {
+ if (not x->IsString()) {
+ (*logger)(fmt::format("outs has to evaluate to a non-empty list of "
+ "strings, but found entry {}",
+ x->ToString()),
+ true);
+ return;
+ }
+ outs.emplace_back(x->String());
+ }
+ auto cmd_exp = desc->ReadOptionalExpression("cmds", empty_list);
+ if (not cmd_exp) {
+ return;
+ }
+ auto cmd_value = cmd_exp.Evaluate(
+ param_config, string_fields_fcts, [&logger](auto const& msg) {
+ (*logger)(fmt::format("While evaluating cmds:\n{}", msg), true);
+ });
+ if (not cmd_value) {
+ return;
+ }
+ if (not cmd_value->IsList()) {
+ (*logger)(fmt::format(
+ "cmds has to evaluate to a list of strings, but found {}",
+ cmd_value->ToString()),
+ true);
+ return;
+ }
+ std::stringstream cmd_ss{};
+ for (auto const& x : cmd_value->List()) {
+ if (not x->IsString()) {
+ (*logger)(fmt::format("cmds has to evaluate to a list of strings, "
+ "but found entry {}",
+ x->ToString()),
+ true);
+ return;
+ }
+ cmd_ss << x->String();
+ cmd_ss << "\n";
+ }
+ auto const& empty_map_exp = Expression::kEmptyMapExpr;
+ auto env_exp = desc->ReadOptionalExpression("env", empty_map_exp);
+ if (not env_exp) {
+ return;
+ }
+ auto env_val = env_exp.Evaluate(
+ param_config, string_fields_fcts, [&logger](auto const& msg) {
+ (*logger)(fmt::format("While evaluating env:\n{}", msg), true);
+ });
+ if (not env_val) {
+ return;
+ }
+ if (not env_val->IsMap()) {
+ (*logger)(
+ fmt::format("cmds has to evaluate to map of strings, but found {}",
+ env_val->ToString()),
+ true);
+ }
+ for (auto const& [var_name, x] : env_val->Map()) {
+ if (not x->IsString()) {
+ (*logger)(fmt::format("env has to evaluate to map of strings, but "
+ "found entry {}",
+ x->ToString()),
+ true);
+ }
+ }
+
+ // Construct inputs; in case of conflicts, artifacts take precedence
+ // over runfiles.
+ auto inputs = ExpressionPtr{Expression::map_t{}};
+ for (auto const& dep : dependency_values) {
+ inputs = ExpressionPtr{Expression::map_t{inputs, (*dep)->RunFiles()}};
+ }
+ for (auto const& dep : dependency_values) {
+ inputs = ExpressionPtr{Expression::map_t{inputs, (*dep)->Artifacts()}};
+ }
+
+ // Construct our single action, and its artifacts
+ auto action =
+ BuildMaps::Target::Utils::createAction(outs,
+ {},
+ {"sh", "-c", cmd_ss.str()},
+ env_val,
+ std::nullopt,
+ false,
+ inputs);
+ auto action_identifier = action.Id();
+ Expression::map_t::underlying_map_t artifacts;
+ for (auto const& path : outs) {
+ artifacts.emplace(path,
+ ExpressionPtr{ArtifactDescription{
+ action_identifier, std::filesystem::path{path}}});
+ }
+
+ auto const& empty_map = Expression::kEmptyMap;
+ auto result = std::make_shared<AnalysedTarget>(
+ TargetResult{
+ ExpressionPtr{Expression::map_t{artifacts}}, empty_map, empty_map},
+ std::vector<ActionDescription>{action},
+ std::vector<std::string>{},
+ std::vector<Tree>{},
+ std::move(effective_vars),
+ std::move(tainted));
+
+ result = result_map->Add(key.target, effective_conf, std::move(result));
+ (*setter)(std::move(result));
+}
+
+void GenericRule(
+ const nlohmann::json& desc_json,
+ const BuildMaps::Target::ConfiguredTarget& key,
+ const BuildMaps::Target::TargetMap::SubCallerPtr& subcaller,
+ const BuildMaps::Target::TargetMap::SetterPtr& setter,
+ const BuildMaps::Target::TargetMap::LoggerPtr& logger,
+ const gsl::not_null<BuildMaps::Target::ResultTargetMap*> result_map) {
+ auto desc = BuildMaps::Base::FieldReader::CreatePtr(
+ desc_json, key.target, "generic target", logger);
+ desc->ExpectFields(genericRuleFields);
+ auto param_vars = desc->ReadStringList("arguments_config");
+ if (not param_vars) {
+ return;
+ }
+ auto param_config = key.config.Prune(*param_vars);
+ auto const& empty_list = Expression::kEmptyList;
+ auto deps_exp = desc->ReadOptionalExpression("deps", empty_list);
+ if (not deps_exp) {
+ return;
+ }
+ auto deps_value =
+ deps_exp.Evaluate(param_config, {}, [&logger](auto const& msg) {
+ (*logger)(fmt::format("While evaluating deps:\n{}", msg), true);
+ });
+ if (not deps_value->IsList()) {
+ (*logger)(fmt::format("Expected deps to evaluate to a list of targets, "
+ "but found {}",
+ deps_value->ToString()),
+ true);
+ return;
+ }
+ std::vector<BuildMaps::Target::ConfiguredTarget> dependency_keys;
+ std::vector<BuildMaps::Target::ConfiguredTarget> transition_keys;
+ dependency_keys.reserve(deps_value->List().size());
+ transition_keys.reserve(deps_value->List().size());
+ auto empty_transition = Configuration{Expression::kEmptyMap};
+ for (auto const& dep_name : deps_value->List()) {
+ auto dep_target = BuildMaps::Base::ParseEntityNameFromExpression(
+ dep_name,
+ key.target,
+ [&logger, &dep_name](std::string const& parse_err) {
+ (*logger)(fmt::format("Parsing dep entry {} failed with:\n{}",
+ dep_name->ToString(),
+ parse_err),
+ true);
+ });
+ if (not dep_target) {
+ return;
+ }
+ dependency_keys.emplace_back(
+ BuildMaps::Target::ConfiguredTarget{*dep_target, key.config});
+ transition_keys.emplace_back(
+ BuildMaps::Target::ConfiguredTarget{*dep_target, empty_transition});
+ }
+ (*subcaller)(
+ dependency_keys,
+ [transition_keys = std::move(transition_keys),
+ desc,
+ setter,
+ logger,
+ key,
+ result_map](auto const& values) {
+ GenericRuleWithDeps(
+ transition_keys, values, desc, key, setter, logger, result_map);
+ },
+ logger);
+}
+
+auto built_ins = std::unordered_map<
+ std::string,
+ std::function<void(
+ const nlohmann::json&,
+ const BuildMaps::Target::ConfiguredTarget&,
+ const BuildMaps::Target::TargetMap::SubCallerPtr&,
+ const BuildMaps::Target::TargetMap::SetterPtr&,
+ const BuildMaps::Target::TargetMap::LoggerPtr&,
+ const gsl::not_null<BuildMaps::Target::ResultTargetMap*>)>>{
+ {"export", ExportRule},
+ {"file_gen", FileGenRule},
+ {"generic", GenericRule},
+ {"install", InstallRule}};
+} // namespace
+
+namespace BuildMaps::Target {
+
+auto IsBuiltInRule(nlohmann::json const& rule_type) -> bool {
+
+ if (not rule_type.is_string()) {
+ // Names for built-in rules are always strings
+ return false;
+ }
+ auto rule_name = rule_type.get<std::string>();
+ return built_ins.contains(rule_name);
+}
+
+auto HandleBuiltin(
+ const nlohmann::json& rule_type,
+ const nlohmann::json& desc,
+ const BuildMaps::Target::ConfiguredTarget& key,
+ const BuildMaps::Target::TargetMap::SubCallerPtr& subcaller,
+ const BuildMaps::Target::TargetMap::SetterPtr& setter,
+ const BuildMaps::Target::TargetMap::LoggerPtr& logger,
+ const gsl::not_null<BuildMaps::Target::ResultTargetMap*>& result_map)
+ -> bool {
+ if (not rule_type.is_string()) {
+ // Names for built-in rules are always strings
+ return false;
+ }
+ auto rule_name = rule_type.get<std::string>();
+ auto it = built_ins.find(rule_name);
+ if (it == built_ins.end()) {
+ return false;
+ }
+ auto target_logger = std::make_shared<BuildMaps::Target::TargetMap::Logger>(
+ [logger, rule_name, key](auto msg, auto fatal) {
+ (*logger)(fmt::format("While evaluating {} target {}:\n{}",
+ rule_name,
+ key.target.ToString(),
+ msg),
+ fatal);
+ });
+ (it->second)(desc, key, subcaller, setter, target_logger, result_map);
+ return true;
+}
+} // namespace BuildMaps::Target
diff --git a/src/buildtool/build_engine/target_map/built_in_rules.hpp b/src/buildtool/build_engine/target_map/built_in_rules.hpp
new file mode 100644
index 00000000..0e56e38d
--- /dev/null
+++ b/src/buildtool/build_engine/target_map/built_in_rules.hpp
@@ -0,0 +1,21 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_BUILT_IN_RULES_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_BUILT_IN_RULES_HPP
+
+#include "nlohmann/json.hpp"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/build_engine/target_map/configured_target.hpp"
+#include "src/buildtool/build_engine/target_map/result_map.hpp"
+#include "src/buildtool/build_engine/target_map/target_map.hpp"
+
+namespace BuildMaps::Target {
+auto HandleBuiltin(
+ const nlohmann::json& rule_type,
+ const nlohmann::json& desc,
+ const BuildMaps::Target::ConfiguredTarget& key,
+ const BuildMaps::Target::TargetMap::SubCallerPtr& subcaller,
+ const BuildMaps::Target::TargetMap::SetterPtr& setter,
+ const BuildMaps::Target::TargetMap::LoggerPtr& logger,
+ const gsl::not_null<BuildMaps::Target::ResultTargetMap*>& result_map)
+ -> bool;
+} // namespace BuildMaps::Target
+#endif
diff --git a/src/buildtool/build_engine/target_map/configured_target.hpp b/src/buildtool/build_engine/target_map/configured_target.hpp
new file mode 100644
index 00000000..b0443cbd
--- /dev/null
+++ b/src/buildtool/build_engine/target_map/configured_target.hpp
@@ -0,0 +1,41 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_CONFIGURED_TARGET_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_CONFIGURED_TARGET_HPP
+
+#include "fmt/core.h"
+#include "src/buildtool/build_engine/base_maps/entity_name.hpp"
+#include "src/buildtool/build_engine/expression/configuration.hpp"
+#include "src/utils/cpp/hash_combine.hpp"
+
+namespace BuildMaps::Target {
+
+struct ConfiguredTarget {
+ BuildMaps::Base::EntityName target;
+ Configuration config;
+
+ [[nodiscard]] auto operator==(
+ BuildMaps::Target::ConfiguredTarget const& other) const noexcept
+ -> bool {
+ return target == other.target && config == other.config;
+ }
+
+ [[nodiscard]] auto ToString() const noexcept -> std::string {
+ return fmt::format("[{},{}]", target.ToString(), config.ToString());
+ }
+};
+
+} // namespace BuildMaps::Target
+
+namespace std {
+template <>
+struct hash<BuildMaps::Target::ConfiguredTarget> {
+ [[nodiscard]] auto operator()(BuildMaps::Target::ConfiguredTarget const& ct)
+ const noexcept -> std::size_t {
+ size_t seed{};
+ hash_combine<>(&seed, ct.target);
+ hash_combine<>(&seed, ct.config);
+ return seed;
+ }
+};
+} // namespace std
+
+#endif
diff --git a/src/buildtool/build_engine/target_map/export.cpp b/src/buildtool/build_engine/target_map/export.cpp
new file mode 100644
index 00000000..cea76d36
--- /dev/null
+++ b/src/buildtool/build_engine/target_map/export.cpp
@@ -0,0 +1,126 @@
+#include "src/buildtool/build_engine/target_map/export.hpp"
+
+#include <unordered_set>
+
+#include "src/buildtool/build_engine/base_maps/field_reader.hpp"
+#include "src/buildtool/build_engine/expression/configuration.hpp"
+
+namespace {
+auto expectedFields = std::unordered_set<std::string>{"config_doc",
+ "doc",
+ "fixed_config",
+ "flexible_config",
+ "target",
+ "type"};
+
+void FinalizeExport(
+ const std::vector<AnalysedTargetPtr const*>& exported,
+ const BuildMaps::Base::EntityName& target,
+ const std::vector<std::string>& vars,
+ const Configuration& effective_config,
+ const BuildMaps::Target::TargetMap::LoggerPtr& logger,
+ const BuildMaps::Target::TargetMap::SetterPtr& setter,
+ const gsl::not_null<BuildMaps::Target::ResultTargetMap*>& result_map) {
+ const auto* value = exported[0];
+ if (not(*value)->Tainted().empty()) {
+ (*logger)("Only untainted targets can be exported.", true);
+ return;
+ }
+ auto provides = (*value)->Provides();
+ if (not provides->IsCacheable()) {
+ (*logger)(fmt::format("Only cacheable values can be exported; but "
+ "target provides {}",
+ provides->ToString()),
+ true);
+ return;
+ }
+ std::unordered_set<std::string> vars_set{};
+ vars_set.insert(vars.begin(), vars.end());
+ // TODO(aehlig): wrap all artifacts into "save to target-cache" special
+ // action
+ auto analysis_result = std::make_shared<AnalysedTarget>(
+ TargetResult{(*value)->Artifacts(), provides, (*value)->RunFiles()},
+ std::vector<ActionDescription>{},
+ std::vector<std::string>{},
+ std::vector<Tree>{},
+ std::move(vars_set),
+ std::set<std::string>{});
+ analysis_result =
+ result_map->Add(target, effective_config, std::move(analysis_result));
+ (*setter)(std::move(analysis_result));
+}
+} // namespace
+
+void ExportRule(
+ const nlohmann::json& desc_json,
+ const BuildMaps::Target::ConfiguredTarget& key,
+ const BuildMaps::Target::TargetMap::SubCallerPtr& subcaller,
+ const BuildMaps::Target::TargetMap::SetterPtr& setter,
+ const BuildMaps::Target::TargetMap::LoggerPtr& logger,
+ const gsl::not_null<BuildMaps::Target::ResultTargetMap*> result_map) {
+ auto desc = BuildMaps::Base::FieldReader::CreatePtr(
+ desc_json, key.target, "export target", logger);
+ desc->ExpectFields(expectedFields);
+ auto exported_target_name = desc->ReadExpression("target");
+ if (not exported_target_name) {
+ return;
+ }
+ auto exported_target = BuildMaps::Base::ParseEntityNameFromExpression(
+ exported_target_name,
+ key.target,
+ [&logger, &exported_target_name](std::string const& parse_err) {
+ (*logger)(fmt::format("Parsing target name {} failed with:\n{}",
+ exported_target_name->ToString(),
+ parse_err),
+ true);
+ });
+ if (not exported_target) {
+ return;
+ }
+ auto flexible_vars = desc->ReadStringList("flexible_config");
+ if (not flexible_vars) {
+ return;
+ }
+ auto effective_config = key.config.Prune(*flexible_vars);
+
+ // TODO(aehlig): if the respository is content-fixed, look up in target
+ // cache with key consistig of repository-description, target, and effective
+ // config.
+
+ auto fixed_config =
+ desc->ReadOptionalExpression("fixed_config", Expression::kEmptyMap);
+ if (not fixed_config->IsMap()) {
+ (*logger)(fmt::format("fixed_config has to be a map, but found {}",
+ fixed_config->ToString()),
+ true);
+ return;
+ }
+ for (auto const& var : fixed_config->Map().Keys()) {
+ if (effective_config.VariableFixed(var)) {
+ (*logger)(
+ fmt::format("Variable {} is both fixed and flexible.", var),
+ true);
+ return;
+ }
+ }
+ auto target_config = effective_config.Update(fixed_config);
+
+ (*subcaller)(
+ {BuildMaps::Target::ConfiguredTarget{std::move(*exported_target),
+ std::move(target_config)}},
+ [setter,
+ logger,
+ vars = std::move(*flexible_vars),
+ result_map,
+ effective_config = std::move(effective_config),
+ target = key.target](auto const& values) {
+ FinalizeExport(values,
+ target,
+ vars,
+ effective_config,
+ logger,
+ setter,
+ result_map);
+ },
+ logger);
+}
diff --git a/src/buildtool/build_engine/target_map/export.hpp b/src/buildtool/build_engine/target_map/export.hpp
new file mode 100644
index 00000000..8b2e17f2
--- /dev/null
+++ b/src/buildtool/build_engine/target_map/export.hpp
@@ -0,0 +1,17 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_EXPORT_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_EXPORT_HPP
+
+#include "nlohmann/json.hpp"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/build_engine/target_map/configured_target.hpp"
+#include "src/buildtool/build_engine/target_map/result_map.hpp"
+#include "src/buildtool/build_engine/target_map/target_map.hpp"
+
+void ExportRule(const nlohmann::json& desc_json,
+ const BuildMaps::Target::ConfiguredTarget& key,
+ const BuildMaps::Target::TargetMap::SubCallerPtr& subcaller,
+ const BuildMaps::Target::TargetMap::SetterPtr& setter,
+ const BuildMaps::Target::TargetMap::LoggerPtr& logger,
+ gsl::not_null<BuildMaps::Target::ResultTargetMap*> result_map);
+
+#endif
diff --git a/src/buildtool/build_engine/target_map/result_map.hpp b/src/buildtool/build_engine/target_map/result_map.hpp
new file mode 100644
index 00000000..b5825ca4
--- /dev/null
+++ b/src/buildtool/build_engine/target_map/result_map.hpp
@@ -0,0 +1,291 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_RESULT_MAP_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_RESULT_MAP_HPP
+
+#include <algorithm>
+#include <fstream>
+#include <mutex>
+#include <string>
+#include <thread>
+#include <vector>
+
+#include "nlohmann/json.hpp"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/build_engine/analysed_target/analysed_target.hpp"
+#include "src/buildtool/build_engine/base_maps/entity_name.hpp"
+#include "src/buildtool/build_engine/expression/expression.hpp"
+#include "src/buildtool/build_engine/target_map/configured_target.hpp"
+#include "src/buildtool/common/tree.hpp"
+#include "src/buildtool/multithreading/task.hpp"
+#include "src/buildtool/multithreading/task_system.hpp"
+#include "src/utils/cpp/hash_combine.hpp"
+
+namespace BuildMaps::Target {
+
+// Class collecting analysed targets for their canonical configuration.
+class ResultTargetMap {
+ public:
+ struct ActionWithOrigin {
+ ActionDescription desc;
+ nlohmann::json origin;
+ };
+
+ template <bool kIncludeOrigins = false>
+ struct ResultType {
+ std::vector<ActionDescription> actions{};
+ std::vector<std::string> blobs{};
+ std::vector<Tree> trees{};
+ };
+
+ template <>
+ struct ResultType</*kIncludeOrigins=*/true> {
+ std::vector<ActionWithOrigin> actions{};
+ std::vector<std::string> blobs{};
+ std::vector<Tree> trees{};
+ };
+
+ explicit ResultTargetMap(std::size_t jobs) : width_{ComputeWidth(jobs)} {}
+
+ ResultTargetMap() = default;
+
+ // \brief Add the analysed target for the given target and
+ // configuration, if no entry is present for the given
+ // target-configuration pair. \returns the analysed target that is
+ // element of the map after insertion.
+ [[nodiscard]] auto Add(BuildMaps::Base::EntityName name,
+ Configuration conf,
+ gsl::not_null<AnalysedTargetPtr> result)
+ -> AnalysedTargetPtr {
+ auto part = std::hash<BuildMaps::Base::EntityName>{}(name) % width_;
+ std::unique_lock lock{m_[part]};
+ auto [entry, inserted] = targets_[part].emplace(
+ ConfiguredTarget{std::move(name), std::move(conf)},
+ std::move(result));
+ if (inserted) {
+ num_actions_[part] += entry->second->Actions().size();
+ num_blobs_[part] += entry->second->Blobs().size();
+ num_trees_[part] += entry->second->Trees().size();
+ }
+ return entry->second;
+ }
+
+ [[nodiscard]] auto ConfiguredTargets() const noexcept
+ -> std::vector<ConfiguredTarget> {
+ std::vector<ConfiguredTarget> targets{};
+ size_t s = 0;
+ for (const auto& target : targets_) {
+ s += target.size();
+ }
+ targets.reserve(s);
+ for (const auto& i : targets_) {
+ std::transform(i.begin(),
+ i.end(),
+ std::back_inserter(targets),
+ [](auto const& target) { return target.first; });
+ }
+ std::sort(targets.begin(),
+ targets.end(),
+ [](auto const& lhs, auto const& rhs) {
+ return lhs.ToString() < rhs.ToString();
+ });
+ return targets;
+ }
+
+ template <bool kIncludeOrigins = false>
+ [[nodiscard]] auto ToResult() const -> ResultType<kIncludeOrigins> {
+ ResultType<kIncludeOrigins> result{};
+ size_t na = 0;
+ size_t nb = 0;
+ size_t nt = 0;
+ for (std::size_t i = 0; i < width_; i++) {
+ na += num_actions_[i];
+ nb += num_blobs_[i];
+ nt += num_trees_[i];
+ }
+ result.actions.reserve(na);
+ result.blobs.reserve(nb);
+ result.trees.reserve(nt);
+
+ std::unordered_map<
+ std::string,
+ std::vector<std::pair<ConfiguredTarget, std::size_t>>>
+ origin_map;
+ origin_map.reserve(na);
+ if constexpr (kIncludeOrigins) {
+ for (const auto& target : targets_) {
+ std::for_each(
+ target.begin(), target.end(), [&](auto const& el) {
+ auto const& actions = el.second->Actions();
+ std::size_t pos{};
+ std::for_each(
+ actions.begin(),
+ actions.end(),
+ [&origin_map, &pos, &el](auto const& action) {
+ std::pair<ConfiguredTarget, std::size_t> origin{
+ el.first, pos++};
+ auto id = action.Id();
+ if (origin_map.contains(id)) {
+ origin_map[id].push_back(origin);
+ }
+ else {
+ origin_map[id] =
+ std::vector<std::pair<ConfiguredTarget,
+ std::size_t>>{
+ origin};
+ }
+ });
+ });
+ }
+ // Sort origins to get a reproducible order. We don't expect many
+ // origins for a single action, so the cost of comparison is not
+ // too important. Moreover, we expect most actions to have a single
+ // origin, so any precomputation would be more expensive.
+ for (auto const& i : origin_map) {
+ std::sort(origin_map[i.first].begin(),
+ origin_map[i.first].end(),
+ [](auto const& left, auto const& right) {
+ auto left_target = left.first.ToString();
+ auto right_target = right.first.ToString();
+ return (left_target < right_target) ||
+ (left_target == right_target &&
+ left.second < right.second);
+ });
+ }
+ }
+
+ for (const auto& target : targets_) {
+ std::for_each(target.begin(), target.end(), [&](auto const& el) {
+ auto const& actions = el.second->Actions();
+ if constexpr (kIncludeOrigins) {
+ std::for_each(actions.begin(),
+ actions.end(),
+ [&result, &origin_map](auto const& action) {
+ auto origins = nlohmann::json::array();
+ for (auto const& [ct, count] :
+ origin_map[action.Id()]) {
+ origins.push_back(nlohmann::json{
+ {"target", ct.target.ToJson()},
+ {"subtask", count},
+ {"config", ct.config.ToJson()}});
+ }
+ result.actions.emplace_back(
+ ActionWithOrigin{action, origins});
+ });
+ }
+ else {
+ std::for_each(actions.begin(),
+ actions.end(),
+ [&result](auto const& action) {
+ result.actions.emplace_back(action);
+ });
+ }
+ auto const& blobs = el.second->Blobs();
+ auto const& trees = el.second->Trees();
+ result.blobs.insert(
+ result.blobs.end(), blobs.begin(), blobs.end());
+ result.trees.insert(
+ result.trees.end(), trees.begin(), trees.end());
+ });
+ }
+
+ std::sort(result.blobs.begin(), result.blobs.end());
+ auto lastblob = std::unique(result.blobs.begin(), result.blobs.end());
+ result.blobs.erase(lastblob, result.blobs.end());
+
+ std::sort(result.trees.begin(),
+ result.trees.end(),
+ [](auto left, auto right) { return left.Id() < right.Id(); });
+ auto lasttree = std::unique(
+ result.trees.begin(),
+ result.trees.end(),
+ [](auto left, auto right) { return left.Id() == right.Id(); });
+ result.trees.erase(lasttree, result.trees.end());
+
+ std::sort(result.actions.begin(),
+ result.actions.end(),
+ [](auto left, auto right) {
+ if constexpr (kIncludeOrigins) {
+ return left.desc.Id() < right.desc.Id();
+ }
+ else {
+ return left.Id() < right.Id();
+ }
+ });
+ auto lastaction =
+ std::unique(result.actions.begin(),
+ result.actions.end(),
+ [](auto left, auto right) {
+ if constexpr (kIncludeOrigins) {
+ return left.desc.Id() == right.desc.Id();
+ }
+ else {
+ return left.Id() == right.Id();
+ }
+ });
+ result.actions.erase(lastaction, result.actions.end());
+
+ return result;
+ }
+
+ template <bool kIncludeOrigins = false>
+ [[nodiscard]] auto ToJson() const -> nlohmann::json {
+ auto const result = ToResult<kIncludeOrigins>();
+ auto actions = nlohmann::json::object();
+ auto trees = nlohmann::json::object();
+ std::for_each(result.actions.begin(),
+ result.actions.end(),
+ [&actions](auto const& action) {
+ if constexpr (kIncludeOrigins) {
+ auto const& id = action.desc.GraphAction().Id();
+ actions[id] = action.desc.ToJson();
+ actions[id]["origins"] = action.origin;
+ }
+ else {
+ auto const& id = action.GraphAction().Id();
+ actions[id] = action.ToJson();
+ }
+ });
+ std::for_each(
+ result.trees.begin(),
+ result.trees.end(),
+ [&trees](auto const& tree) { trees[tree.Id()] = tree.ToJson(); });
+ return nlohmann::json{
+ {"actions", actions}, {"blobs", result.blobs}, {"trees", trees}};
+ }
+
+ template <bool kIncludeOrigins = true>
+ auto ToFile(std::string const& graph_file, int indent = 2) const -> void {
+ std::ofstream os(graph_file);
+ os << std::setw(indent) << ToJson<kIncludeOrigins>() << std::endl;
+ }
+
+ void Clear(gsl::not_null<TaskSystem*> const& ts) {
+ for (std::size_t i = 0; i < width_; ++i) {
+ ts->QueueTask([i, this]() { targets_[i].clear(); });
+ }
+ }
+
+ private:
+ constexpr static std::size_t kScalingFactor = 2;
+ std::size_t width_{ComputeWidth(0)};
+ std::vector<std::mutex> m_{width_};
+ std::vector<
+ std::unordered_map<ConfiguredTarget, gsl::not_null<AnalysedTargetPtr>>>
+ targets_{width_};
+ std::vector<std::size_t> num_actions_{std::vector<std::size_t>(width_)};
+ std::vector<std::size_t> num_blobs_{std::vector<std::size_t>(width_)};
+ std::vector<std::size_t> num_trees_{std::vector<std::size_t>(width_)};
+
+ constexpr static auto ComputeWidth(std::size_t jobs) -> std::size_t {
+ if (jobs <= 0) {
+ // Non-positive indicates to use the default value
+ return ComputeWidth(
+ std::max(1U, std::thread::hardware_concurrency()));
+ }
+ return jobs * kScalingFactor + 1;
+ }
+
+}; // namespace BuildMaps::Target
+
+} // namespace BuildMaps::Target
+
+#endif // INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_RESULT_MAP_HPP
diff --git a/src/buildtool/build_engine/target_map/target_map.cpp b/src/buildtool/build_engine/target_map/target_map.cpp
new file mode 100644
index 00000000..327dbe02
--- /dev/null
+++ b/src/buildtool/build_engine/target_map/target_map.cpp
@@ -0,0 +1,1338 @@
+#include "src/buildtool/build_engine/target_map/target_map.hpp"
+
+#include <algorithm>
+#include <memory>
+#include <set>
+#include <string>
+#include <utility>
+
+#include "nlohmann/json.hpp"
+#include "src/buildtool/build_engine/base_maps/field_reader.hpp"
+#include "src/buildtool/build_engine/expression/configuration.hpp"
+#include "src/buildtool/build_engine/expression/evaluator.hpp"
+#include "src/buildtool/build_engine/expression/function_map.hpp"
+#include "src/buildtool/build_engine/target_map/built_in_rules.hpp"
+#include "src/buildtool/build_engine/target_map/utils.hpp"
+
+namespace {
+
+using namespace std::string_literals;
+
+[[nodiscard]] auto ReadActionOutputExpr(ExpressionPtr const& out_exp,
+ std::string const& field_name)
+ -> ActionDescription::outputs_t {
+ if (not out_exp->IsList()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("{} has to be a list of strings, but found {}",
+ field_name,
+ out_exp->ToString())};
+ }
+ ActionDescription::outputs_t outputs;
+ outputs.reserve(out_exp->List().size());
+ for (auto const& out_path : out_exp->List()) {
+ if (not out_path->IsString()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("{} has to be a list of strings, but found {}",
+ field_name,
+ out_exp->ToString())};
+ }
+ outputs.emplace_back(out_path->String());
+ }
+ return outputs;
+}
+
+struct TargetData {
+ using Ptr = std::shared_ptr<TargetData>;
+
+ std::vector<std::string> target_vars;
+ std::unordered_map<std::string, ExpressionPtr> config_exprs;
+ std::unordered_map<std::string, ExpressionPtr> string_exprs;
+ std::unordered_map<std::string, ExpressionPtr> target_exprs;
+ ExpressionPtr tainted_expr;
+ bool parse_target_names{};
+
+ TargetData(std::vector<std::string> target_vars,
+ std::unordered_map<std::string, ExpressionPtr> config_exprs,
+ std::unordered_map<std::string, ExpressionPtr> string_exprs,
+ std::unordered_map<std::string, ExpressionPtr> target_exprs,
+ ExpressionPtr tainted_expr,
+ bool parse_target_names)
+ : target_vars{std::move(target_vars)},
+ config_exprs{std::move(config_exprs)},
+ string_exprs{std::move(string_exprs)},
+ target_exprs{std::move(target_exprs)},
+ tainted_expr{std::move(tainted_expr)},
+ parse_target_names{parse_target_names} {}
+
+ [[nodiscard]] static auto FromFieldReader(
+ BuildMaps::Base::UserRulePtr const& rule,
+ BuildMaps::Base::FieldReader::Ptr const& desc) -> TargetData::Ptr {
+ desc->ExpectFields(rule->ExpectedFields());
+
+ auto target_vars = desc->ReadStringList("arguments_config");
+ auto tainted_expr =
+ desc->ReadOptionalExpression("tainted", Expression::kEmptyList);
+
+ auto convert_to_exprs =
+ [&desc](gsl::not_null<
+ std::unordered_map<std::string, ExpressionPtr>*> const&
+ expr_map,
+ std::vector<std::string> const& field_names) -> bool {
+ for (auto const& field_name : field_names) {
+ auto expr = desc->ReadOptionalExpression(
+ field_name, Expression::kEmptyList);
+ if (not expr) {
+ return false;
+ }
+ expr_map->emplace(field_name, std::move(expr));
+ }
+ return true;
+ };
+
+ std::unordered_map<std::string, ExpressionPtr> config_exprs;
+ std::unordered_map<std::string, ExpressionPtr> string_exprs;
+ std::unordered_map<std::string, ExpressionPtr> target_exprs;
+ if (target_vars and tainted_expr and
+ convert_to_exprs(&config_exprs, rule->ConfigFields()) and
+ convert_to_exprs(&string_exprs, rule->StringFields()) and
+ convert_to_exprs(&target_exprs, rule->TargetFields())) {
+ return std::make_shared<TargetData>(std::move(*target_vars),
+ std::move(config_exprs),
+ std::move(string_exprs),
+ std::move(target_exprs),
+ std::move(tainted_expr),
+ /*parse_target_names=*/true);
+ }
+ return nullptr;
+ }
+
+ [[nodiscard]] static auto FromTargetNode(
+ BuildMaps::Base::UserRulePtr const& rule,
+ TargetNode::Abstract const& node,
+ ExpressionPtr const& rule_map,
+ gsl::not_null<AsyncMapConsumerLoggerPtr> const& logger)
+ -> TargetData::Ptr {
+
+ auto const& string_fields = node.string_fields->Map();
+ auto const& target_fields = node.target_fields->Map();
+
+ std::unordered_map<std::string, ExpressionPtr> config_exprs;
+ std::unordered_map<std::string, ExpressionPtr> string_exprs;
+ std::unordered_map<std::string, ExpressionPtr> target_exprs;
+
+ for (auto const& field_name : rule->ConfigFields()) {
+ if (target_fields.Find(field_name)) {
+ (*logger)(
+ fmt::format(
+ "Expected config field '{}' in string_fields of "
+ "abstract node type '{}', and not in target_fields",
+ field_name,
+ node.node_type),
+ /*fatal=*/true);
+ return nullptr;
+ }
+ auto const& config_expr =
+ string_fields.Find(field_name)
+ .value_or(std::reference_wrapper{Expression::kEmptyList})
+ .get();
+ config_exprs.emplace(field_name, config_expr);
+ }
+
+ for (auto const& field_name : rule->StringFields()) {
+ if (target_fields.Find(field_name)) {
+ (*logger)(
+ fmt::format(
+ "Expected string field '{}' in string_fields of "
+ "abstract node type '{}', and not in target_fields",
+ field_name,
+ node.node_type),
+ /*fatal=*/true);
+ return nullptr;
+ }
+ auto const& string_expr =
+ string_fields.Find(field_name)
+ .value_or(std::reference_wrapper{Expression::kEmptyList})
+ .get();
+ string_exprs.emplace(field_name, string_expr);
+ }
+
+ for (auto const& field_name : rule->TargetFields()) {
+ if (string_fields.Find(field_name)) {
+ (*logger)(
+ fmt::format(
+ "Expected target field '{}' in target_fields of "
+ "abstract node type '{}', and not in string_fields",
+ field_name,
+ node.node_type),
+ /*fatal=*/true);
+ return nullptr;
+ }
+ auto const& target_expr =
+ target_fields.Find(field_name)
+ .value_or(std::reference_wrapper{Expression::kEmptyList})
+ .get();
+ auto const& nodes = target_expr->List();
+ Expression::list_t targets{};
+ targets.reserve(nodes.size());
+ for (auto const& node_expr : nodes) {
+ targets.emplace_back(ExpressionPtr{BuildMaps::Base::EntityName{
+ BuildMaps::Base::AnonymousTarget{rule_map, node_expr}}});
+ }
+ target_exprs.emplace(field_name, targets);
+ }
+
+ return std::make_shared<TargetData>(std::vector<std::string>{},
+ std::move(config_exprs),
+ std::move(string_exprs),
+ std::move(target_exprs),
+ Expression::kEmptyList,
+ /*parse_target_names=*/false);
+ }
+};
+
+void withDependencies(
+ const std::vector<BuildMaps::Target::ConfiguredTarget>& transition_keys,
+ const std::vector<AnalysedTargetPtr const*>& dependency_values,
+ const BuildMaps::Base::UserRulePtr& rule,
+ const TargetData::Ptr& data,
+ const BuildMaps::Target::ConfiguredTarget& key,
+ std::unordered_map<std::string, ExpressionPtr> params,
+ const BuildMaps::Target::TargetMap::SetterPtr& setter,
+ const BuildMaps::Target::TargetMap::LoggerPtr& logger,
+ const gsl::not_null<BuildMaps::Target::ResultTargetMap*>& result_map) {
+ // Associate dependency keys with values
+ std::unordered_map<BuildMaps::Target::ConfiguredTarget, AnalysedTargetPtr>
+ deps_by_transition;
+ deps_by_transition.reserve(transition_keys.size());
+ for (size_t i = 0; i < transition_keys.size(); ++i) {
+ deps_by_transition.emplace(transition_keys[i], *dependency_values[i]);
+ }
+
+ // Compute the effective dependecy on config variables
+ std::unordered_set<std::string> effective_vars;
+ auto const& param_vars = data->target_vars;
+ effective_vars.insert(param_vars.begin(), param_vars.end());
+ auto const& config_vars = rule->ConfigVars();
+ effective_vars.insert(config_vars.begin(), config_vars.end());
+ for (auto const& [transition, target] : deps_by_transition) {
+ for (auto const& x : target->Vars()) {
+ if (not transition.config.VariableFixed(x)) {
+ effective_vars.insert(x);
+ }
+ }
+ }
+ auto effective_conf = key.config.Prune(effective_vars);
+
+ // Compute and verify taintedness
+ auto tainted = std::set<std::string>{};
+ auto got_tainted = BuildMaps::Target::Utils::getTainted(
+ &tainted, key.config.Prune(param_vars), data->tainted_expr, logger);
+ if (not got_tainted) {
+ return;
+ }
+ tainted.insert(rule->Tainted().begin(), rule->Tainted().end());
+ for (auto const& dep : dependency_values) {
+ if (not std::includes(tainted.begin(),
+ tainted.end(),
+ (*dep)->Tainted().begin(),
+ (*dep)->Tainted().end())) {
+ (*logger)(
+ "Not tainted with all strings the dependencies are tainted "
+ "with",
+ true);
+ return;
+ }
+ }
+
+ // Evaluate string parameters
+ auto string_fields_fcts =
+ FunctionMap::MakePtr(FunctionMap::underlying_map_t{
+ {"outs",
+ [&deps_by_transition, &key](
+ auto&& eval, auto const& expr, auto const& env) {
+ return BuildMaps::Target::Utils::keys_expr(
+ BuildMaps::Target::Utils::obtainTargetByName(
+ eval, expr, env, key.target, deps_by_transition)
+ ->Artifacts());
+ }},
+ {"runfiles",
+ [&deps_by_transition, &key](
+ auto&& eval, auto const& expr, auto const& env) {
+ return BuildMaps::Target::Utils::keys_expr(
+ BuildMaps::Target::Utils::obtainTargetByName(
+ eval, expr, env, key.target, deps_by_transition)
+ ->RunFiles());
+ }}});
+ auto param_config = key.config.Prune(param_vars);
+ params.reserve(params.size() + rule->StringFields().size());
+ for (auto const& field_name : rule->StringFields()) {
+ auto const& field_exp = data->string_exprs[field_name];
+ auto field_value = field_exp.Evaluate(
+ param_config,
+ string_fields_fcts,
+ [&logger, &field_name](auto const& msg) {
+ (*logger)(fmt::format("While evaluating string field {}:\n{}",
+ field_name,
+ msg),
+ true);
+ });
+ if (not field_value) {
+ return;
+ }
+ if (not field_value->IsList()) {
+ (*logger)(fmt::format("String field {} should be a list of "
+ "strings, but found {}",
+ field_name,
+ field_value->ToString()),
+ true);
+ return;
+ }
+ for (auto const& entry : field_value->List()) {
+ if (not entry->IsString()) {
+ (*logger)(fmt::format("String field {} should be a list of "
+ "strings, but found entry {}",
+ field_name,
+ entry->ToString()),
+ true);
+ return;
+ }
+ }
+ params.emplace(field_name, std::move(field_value));
+ }
+
+ // Evaluate main expression
+ auto expression_config = key.config.Prune(config_vars);
+ std::vector<ActionDescription> actions{};
+ std::vector<std::string> blobs{};
+ std::vector<Tree> trees{};
+ auto main_exp_fcts = FunctionMap::MakePtr(
+ {{"FIELD",
+ [&params](auto&& eval, auto const& expr, auto const& env) {
+ auto name = eval(expr["name"], env);
+ if (not name->IsString()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("FIELD argument 'name' should evaluate to a "
+ "string, but got {}",
+ name->ToString())};
+ }
+ auto it = params.find(name->String());
+ if (it == params.end()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("FIELD '{}' unknown", name->String())};
+ }
+ return it->second;
+ }},
+ {"DEP_ARTIFACTS",
+ [&deps_by_transition](
+ auto&& eval, auto const& expr, auto const& env) {
+ return BuildMaps::Target::Utils::obtainTarget(
+ eval, expr, env, deps_by_transition)
+ ->Artifacts();
+ }},
+ {"DEP_RUNFILES",
+ [&deps_by_transition](
+ auto&& eval, auto const& expr, auto const& env) {
+ return BuildMaps::Target::Utils::obtainTarget(
+ eval, expr, env, deps_by_transition)
+ ->RunFiles();
+ }},
+ {"DEP_PROVIDES",
+ [&deps_by_transition](
+ auto&& eval, auto const& expr, auto const& env) {
+ auto const& provided = BuildMaps::Target::Utils::obtainTarget(
+ eval, expr, env, deps_by_transition)
+ ->Provides();
+ auto provider = eval(expr["provider"], env);
+ auto provided_value = provided->At(provider->String());
+ if (provided_value) {
+ return provided_value->get();
+ }
+ auto const& empty_list = Expression::kEmptyList;
+ return eval(expr->Get("default", empty_list), env);
+ }},
+ {"ACTION",
+ [&actions, &rule](auto&& eval, auto const& expr, auto const& env) {
+ auto const& empty_map_exp = Expression::kEmptyMapExpr;
+ auto inputs_exp = eval(expr->Get("inputs", empty_map_exp), env);
+ if (not inputs_exp->IsMap()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "inputs has to be a map of artifacts, but found {}",
+ inputs_exp->ToString())};
+ }
+ for (auto const& [input_path, artifact] : inputs_exp->Map()) {
+ if (not artifact->IsArtifact()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("inputs has to be a map of Artifacts, "
+ "but found {} for {}",
+ artifact->ToString(),
+ input_path)};
+ }
+ }
+ auto conflict =
+ BuildMaps::Target::Utils::tree_conflict(inputs_exp);
+ if (conflict) {
+ throw Evaluator::EvaluationError{
+ fmt::format("inputs conflicts on subtree {}", *conflict)};
+ }
+
+ Expression::map_t::underlying_map_t result;
+ auto outputs = ReadActionOutputExpr(
+ eval(expr->Get("outs", Expression::list_t{}), env), "outs");
+ auto output_dirs = ReadActionOutputExpr(
+ eval(expr->Get("out_dirs", Expression::list_t{}), env),
+ "out_dirs");
+ if (outputs.empty() and output_dirs.empty()) {
+ throw Evaluator::EvaluationError{
+ "either outs or out_dirs must be specified for ACTION"};
+ }
+
+ std::sort(outputs.begin(), outputs.end());
+ std::sort(output_dirs.begin(), output_dirs.end());
+ std::vector<std::string> dups{};
+ std::set_intersection(outputs.begin(),
+ outputs.end(),
+ output_dirs.begin(),
+ output_dirs.end(),
+ std::back_inserter(dups));
+ if (not dups.empty()) {
+ throw Evaluator::EvaluationError{
+ "outs and out_dirs for ACTION must be disjoint"};
+ }
+
+ std::vector<std::string> cmd;
+ auto cmd_exp = eval(expr->Get("cmd", Expression::list_t{}), env);
+ if (not cmd_exp->IsList()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "cmd has to be a list of strings, but found {}",
+ cmd_exp->ToString())};
+ }
+ if (cmd_exp->List().empty()) {
+ throw Evaluator::EvaluationError{
+ "cmd must not be an empty list"};
+ }
+ cmd.reserve(cmd_exp->List().size());
+ for (auto const& arg : cmd_exp->List()) {
+ if (not arg->IsString()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "cmd has to be a list of strings, but found {}",
+ cmd_exp->ToString())};
+ }
+ cmd.emplace_back(arg->String());
+ }
+ auto env_exp = eval(expr->Get("env", empty_map_exp), env);
+ if (not env_exp->IsMap()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("env has to be a map of string, but found {}",
+ env_exp->ToString())};
+ }
+ for (auto const& [env_var, env_value] : env_exp->Map()) {
+ if (not env_value->IsString()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "env has to be a map of string, but found {}",
+ env_exp->ToString())};
+ }
+ }
+ auto may_fail_exp = expr->Get("may_fail", Expression::list_t{});
+ if (not may_fail_exp->IsList()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("may_fail has to be a list of "
+ "strings, but found {}",
+ may_fail_exp->ToString())};
+ }
+ for (auto const& entry : may_fail_exp->List()) {
+ if (not entry->IsString()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("may_fail has to be a list of "
+ "strings, but found {}",
+ may_fail_exp->ToString())};
+ }
+ if (rule->Tainted().find(entry->String()) ==
+ rule->Tainted().end()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("may_fail contains entry {} the the rule "
+ "is not tainted with",
+ entry->ToString())};
+ }
+ }
+ std::optional<std::string> may_fail = std::nullopt;
+ if (not may_fail_exp->List().empty()) {
+ auto fail_msg =
+ eval(expr->Get("fail_message", "action failed"s), env);
+ if (not fail_msg->IsString()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "fail_message has to evalute to a string, but got {}",
+ fail_msg->ToString())};
+ }
+ may_fail = std::optional{fail_msg->String()};
+ }
+ auto no_cache_exp = expr->Get("no_cache", Expression::list_t{});
+ if (not no_cache_exp->IsList()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("no_cache has to be a list of"
+ "strings, but found {}",
+ no_cache_exp->ToString())};
+ }
+ for (auto const& entry : no_cache_exp->List()) {
+ if (not entry->IsString()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("no_cache has to be a list of"
+ "strings, but found {}",
+ no_cache_exp->ToString())};
+ }
+ if (rule->Tainted().find(entry->String()) ==
+ rule->Tainted().end()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("no_cache contains entry {} the the rule "
+ "is not tainted with",
+ entry->ToString())};
+ }
+ }
+ bool no_cache = not no_cache_exp->List().empty();
+ auto action =
+ BuildMaps::Target::Utils::createAction(outputs,
+ output_dirs,
+ std::move(cmd),
+ env_exp,
+ may_fail,
+ no_cache,
+ inputs_exp);
+ auto action_id = action.Id();
+ actions.emplace_back(std::move(action));
+ for (auto const& out : outputs) {
+ result.emplace(out,
+ ExpressionPtr{ArtifactDescription{
+ action_id, std::filesystem::path{out}}});
+ }
+ for (auto const& out : output_dirs) {
+ result.emplace(out,
+ ExpressionPtr{ArtifactDescription{
+ action_id, std::filesystem::path{out}}});
+ }
+
+ return ExpressionPtr{Expression::map_t{result}};
+ }},
+ {"BLOB",
+ [&blobs](auto&& eval, auto const& expr, auto const& env) {
+ auto data = eval(expr->Get("data", ""s), env);
+ if (not data->IsString()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("BLOB data has to be a string, but got {}",
+ data->ToString())};
+ }
+ blobs.emplace_back(data->String());
+ return ExpressionPtr{ArtifactDescription{
+ {ComputeHash(data->String()), data->String().size()},
+ ObjectType::File}};
+ }},
+ {"TREE",
+ [&trees](auto&& eval, auto const& expr, auto const& env) {
+ auto val = eval(expr->Get("$1", Expression::kEmptyMapExpr), env);
+ if (not val->IsMap()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("TREE argument has to be a map of artifacts, "
+ "but found {}",
+ val->ToString())};
+ }
+ std::unordered_map<std::string, ArtifactDescription> artifacts;
+ artifacts.reserve(val->Map().size());
+ for (auto const& [input_path, artifact] : val->Map()) {
+ if (not artifact->IsArtifact()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "TREE argument has to be a map of artifacts, "
+ "but found {} for {}",
+ artifact->ToString(),
+ input_path)};
+ }
+ auto norm_path = std::filesystem::path{input_path}
+ .lexically_normal()
+ .string();
+ if (norm_path == "." or norm_path.empty()) {
+ if (val->Map().size() > 1) {
+ throw Evaluator::EvaluationError{
+ "input path '.' or '' for TREE is only allowed "
+ "for trees with single input artifact"};
+ }
+ if (not artifact->Artifact().IsTree()) {
+ throw Evaluator::EvaluationError{
+ "input path '.' or '' for TREE must be tree "
+ "artifact"};
+ }
+ return artifact;
+ }
+ artifacts.emplace(std::move(norm_path), artifact->Artifact());
+ }
+ auto conflict = BuildMaps::Target::Utils::tree_conflict(val);
+ if (conflict) {
+ throw Evaluator::EvaluationError{
+ fmt::format("TREE conflicts on subtree {}", *conflict)};
+ }
+ auto tree = Tree{std::move(artifacts)};
+ auto tree_id = tree.Id();
+ trees.emplace_back(std::move(tree));
+ return ExpressionPtr{ArtifactDescription{tree_id}};
+ }},
+ {"VALUE_NODE",
+ [](auto&& eval, auto const& expr, auto const& env) {
+ auto val = eval(expr->Get("$1", Expression::kNone), env);
+ if (not val->IsResult()) {
+ throw Evaluator::EvaluationError{
+ "argument '$1' for VALUE_NODE not a RESULT type."};
+ }
+ return ExpressionPtr{TargetNode{std::move(val)}};
+ }},
+ {"ABSTRACT_NODE",
+ [](auto&& eval, auto const& expr, auto const& env) {
+ auto type = eval(expr->Get("node_type", Expression::kNone), env);
+ if (not type->IsString()) {
+ throw Evaluator::EvaluationError{
+ "argument 'node_type' for ABSTRACT_NODE not a string."};
+ }
+ auto string_fields = eval(
+ expr->Get("string_fields", Expression::kEmptyMapExpr), env);
+ if (not string_fields->IsMap()) {
+ throw Evaluator::EvaluationError{
+ "argument 'string_fields' for ABSTRACT_NODE not a map."};
+ }
+ auto target_fields = eval(
+ expr->Get("target_fields", Expression::kEmptyMapExpr), env);
+ if (not target_fields->IsMap()) {
+ throw Evaluator::EvaluationError{
+ "argument 'target_fields' for ABSTRACT_NODE not a map."};
+ }
+
+ std::optional<std::string> dup_key{std::nullopt};
+ auto check_entries =
+ [&dup_key](auto const& map,
+ auto const& type_check,
+ std::string const& fields_name,
+ std::string const& type_name,
+ std::optional<ExpressionPtr> const& disjoint_map =
+ std::nullopt) {
+ for (auto const& [key, list] : map->Map()) {
+ if (not list->IsList()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "value for key {} in argument '{}' for "
+ "ABSTRACT_NODE is not a list.",
+ key,
+ fields_name)};
+ }
+ for (auto const& entry : list->List()) {
+ if (not type_check(entry)) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "list entry for {} in argument '{}' for "
+ "ABSTRACT_NODE is not a {}:\n{}",
+ key,
+ fields_name,
+ type_name,
+ entry->ToString())};
+ }
+ }
+ if (disjoint_map) {
+ if ((*disjoint_map)->Map().Find(key)) {
+ dup_key = key;
+ return;
+ }
+ }
+ }
+ };
+
+ auto is_string = [](auto const& e) { return e->IsString(); };
+ check_entries(string_fields,
+ is_string,
+ "string_fields",
+ "string",
+ target_fields);
+ if (dup_key) {
+ throw Evaluator::EvaluationError{
+ fmt::format("string_fields and target_fields are not "
+ "disjoint maps, found duplicate key: {}.",
+ *dup_key)};
+ }
+
+ auto is_node = [](auto const& e) { return e->IsNode(); };
+ check_entries(
+ target_fields, is_node, "target_fields", "target node");
+
+ return ExpressionPtr{
+ TargetNode{TargetNode::Abstract{type->String(),
+ std::move(string_fields),
+ std::move(target_fields)}}};
+ }},
+ {"RESULT", [](auto&& eval, auto const& expr, auto const& env) {
+ auto const& empty_map_exp = Expression::kEmptyMapExpr;
+ auto artifacts = eval(expr->Get("artifacts", empty_map_exp), env);
+ auto runfiles = eval(expr->Get("runfiles", empty_map_exp), env);
+ auto provides = eval(expr->Get("provides", empty_map_exp), env);
+ if (not artifacts->IsMap()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "artifacts has to be a map of artifacts, but found {}",
+ artifacts->ToString())};
+ }
+ for (auto const& [path, entry] : artifacts->Map()) {
+ if (not entry->IsArtifact()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("artifacts has to be a map of artifacts, "
+ "but found {} for {}",
+ entry->ToString(),
+ path)};
+ }
+ }
+ if (not runfiles->IsMap()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "runfiles has to be a map of artifacts, but found {}",
+ runfiles->ToString())};
+ }
+ for (auto const& [path, entry] : runfiles->Map()) {
+ if (not entry->IsArtifact()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("runfiles has to be a map of artifacts, "
+ "but found {} for {}",
+ entry->ToString(),
+ path)};
+ }
+ }
+ if (not provides->IsMap()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("provides has to be a map, but found {}",
+ provides->ToString())};
+ }
+ return ExpressionPtr{TargetResult{artifacts, provides, runfiles}};
+ }}});
+
+ auto result = rule->Expression()->Evaluate(
+ expression_config, main_exp_fcts, [logger](auto const& msg) {
+ (*logger)(
+ fmt::format("While evaluating defining expression of rule:\n{}",
+ msg),
+ true);
+ });
+ if (not result) {
+ return;
+ }
+ if (not result->IsResult()) {
+ (*logger)(fmt::format("Defining expression should evaluate to a "
+ "RESULT, but got: {}",
+ result->ToString()),
+ true);
+ return;
+ }
+ auto analysis_result =
+ std::make_shared<AnalysedTarget>((*std::move(result)).Result(),
+ std::move(actions),
+ std::move(blobs),
+ std::move(trees),
+ std::move(effective_vars),
+ std::move(tainted));
+ analysis_result =
+ result_map->Add(key.target, effective_conf, std::move(analysis_result));
+ (*setter)(std::move(analysis_result));
+}
+
+[[nodiscard]] auto isTransition(
+ const ExpressionPtr& ptr,
+ std::function<void(std::string const&)> const& logger) -> bool {
+ if (not ptr->IsList()) {
+ logger(fmt::format("expected list, but got {}", ptr->ToString()));
+ return false;
+ }
+ for (const auto& entry : ptr->List()) {
+ if (not entry->IsMap()) {
+ logger(fmt::format("expected list of dicts, but found {}",
+ ptr->ToString()));
+ return false;
+ }
+ }
+
+ return true;
+}
+
+void withRuleDefinition(
+ const BuildMaps::Base::UserRulePtr& rule,
+ const TargetData::Ptr& data,
+ const BuildMaps::Target::ConfiguredTarget& key,
+ const BuildMaps::Target::TargetMap::SubCallerPtr& subcaller,
+ const BuildMaps::Target::TargetMap::SetterPtr& setter,
+ const BuildMaps::Target::TargetMap::LoggerPtr& logger,
+ const gsl::not_null<BuildMaps::Target::ResultTargetMap*> result_map) {
+ auto param_config = key.config.Prune(data->target_vars);
+
+ // Evaluate the config_fields
+
+ std::unordered_map<std::string, ExpressionPtr> params;
+ params.reserve(rule->ConfigFields().size() + rule->TargetFields().size() +
+ rule->ImplicitTargetExps().size());
+ for (auto field_name : rule->ConfigFields()) {
+ auto const& field_expression = data->config_exprs[field_name];
+ auto field_value = field_expression.Evaluate(
+ param_config, {}, [&logger, &field_name](auto const& msg) {
+ (*logger)(fmt::format("While evaluating config fieled {}:\n{}",
+ field_name,
+ msg),
+ true);
+ });
+ if (not field_value) {
+ return;
+ }
+ if (not field_value->IsList()) {
+ (*logger)(fmt::format("Config field {} should evaluate to a list "
+ "of strings, but got{}",
+ field_name,
+ field_value->ToString()),
+ true);
+ return;
+ }
+ for (auto const& entry : field_value->List()) {
+ if (not entry->IsString()) {
+ (*logger)(fmt::format("Config field {} should evaluate to a "
+ "list of strings, but got{}",
+ field_name,
+ field_value->ToString()),
+ true);
+ return;
+ }
+ }
+ params.emplace(field_name, field_value);
+ }
+
+ // Evaluate config transitions
+
+ auto config_trans_fcts = FunctionMap::MakePtr(
+ "FIELD", [&params](auto&& eval, auto const& expr, auto const& env) {
+ auto name = eval(expr["name"], env);
+ if (not name->IsString()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("FIELD argument 'name' should evaluate to a "
+ "string, but got {}",
+ name->ToString())};
+ }
+ auto it = params.find(name->String());
+ if (it == params.end()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("FIELD {} unknown", name->String())};
+ }
+ return it->second;
+ });
+
+ auto const& config_vars = rule->ConfigVars();
+ auto expression_config = key.config.Prune(config_vars);
+
+ std::unordered_map<std::string, ExpressionPtr> config_transitions;
+ config_transitions.reserve(rule->TargetFields().size() +
+ rule->ImplicitTargets().size() +
+ rule->AnonymousDefinitions().size());
+ for (auto const& target_field_name : rule->TargetFields()) {
+ auto exp = rule->ConfigTransitions().at(target_field_name);
+ auto transition_logger = [&logger,
+ &target_field_name](auto const& msg) {
+ (*logger)(
+ fmt::format("While evaluating config transition for {}:\n{}",
+ target_field_name,
+ msg),
+ true);
+ };
+ auto transition = exp->Evaluate(
+ expression_config, config_trans_fcts, transition_logger);
+ if (not transition) {
+ return;
+ }
+ if (not isTransition(transition, transition_logger)) {
+ return;
+ }
+ config_transitions.emplace(target_field_name, transition);
+ }
+ for (const auto& name_value : rule->ImplicitTargets()) {
+ auto implicit_field_name = name_value.first;
+ auto exp = rule->ConfigTransitions().at(implicit_field_name);
+ auto transition_logger = [&logger,
+ &implicit_field_name](auto const& msg) {
+ (*logger)(fmt::format("While evaluating config transition for "
+ "implicit {}:\n{}",
+ implicit_field_name,
+ msg),
+ true);
+ };
+ auto transition = exp->Evaluate(
+ expression_config, config_trans_fcts, transition_logger);
+ if (not transition) {
+ return;
+ }
+ if (not isTransition(transition, transition_logger)) {
+ return;
+ }
+ config_transitions.emplace(implicit_field_name, transition);
+ }
+ for (const auto& entry : rule->AnonymousDefinitions()) {
+ auto const& anon_field_name = entry.first;
+ auto exp = rule->ConfigTransitions().at(anon_field_name);
+ auto transition_logger = [&logger, &anon_field_name](auto const& msg) {
+ (*logger)(fmt::format("While evaluating config transition for "
+ "anonymous {}:\n{}",
+ anon_field_name,
+ msg),
+ true);
+ };
+ auto transition = exp->Evaluate(
+ expression_config, config_trans_fcts, transition_logger);
+ if (not transition) {
+ return;
+ }
+ if (not isTransition(transition, transition_logger)) {
+ return;
+ }
+ config_transitions.emplace(anon_field_name, transition);
+ }
+
+ // Request dependencies
+
+ std::unordered_map<std::string, std::vector<std::size_t>> anon_positions;
+ anon_positions.reserve(rule->AnonymousDefinitions().size());
+ for (auto const& [_, def] : rule->AnonymousDefinitions()) {
+ anon_positions.emplace(def.target, std::vector<std::size_t>{});
+ }
+
+ std::vector<BuildMaps::Target::ConfiguredTarget> dependency_keys;
+ std::vector<BuildMaps::Target::ConfiguredTarget> transition_keys;
+ for (auto target_field_name : rule->TargetFields()) {
+ auto const& deps_expression = data->target_exprs[target_field_name];
+ auto deps_names = deps_expression.Evaluate(
+ param_config, {}, [logger, target_field_name](auto const& msg) {
+ (*logger)(
+ fmt::format("While evaluating target parameter {}:\n{}",
+ target_field_name,
+ msg),
+ true);
+ });
+ if (not deps_names->IsList()) {
+ (*logger)(fmt::format("Target parameter {} should evaluate to a "
+ "list, but got {}",
+ target_field_name,
+ deps_names->ToString()),
+ true);
+ return;
+ }
+ Expression::list_t dep_target_exps;
+ if (data->parse_target_names) {
+ dep_target_exps.reserve(deps_names->List().size());
+ for (const auto& dep_name : deps_names->List()) {
+ auto target = BuildMaps::Base::ParseEntityNameFromExpression(
+ dep_name,
+ key.target,
+ [&logger, &target_field_name, &dep_name](
+ std::string const& parse_err) {
+ (*logger)(fmt::format("Parsing entry {} in target "
+ "field {} failed with:\n{}",
+ dep_name->ToString(),
+ target_field_name,
+ parse_err),
+ true);
+ });
+ if (not target) {
+ return;
+ }
+ dep_target_exps.emplace_back(ExpressionPtr{*target});
+ }
+ }
+ else {
+ dep_target_exps = deps_names->List();
+ }
+ auto anon_pos = anon_positions.find(target_field_name);
+ auto const& transitions = config_transitions[target_field_name]->List();
+ for (const auto& transition : transitions) {
+ auto transitioned_config = key.config.Update(transition);
+ for (const auto& dep : dep_target_exps) {
+ if (anon_pos != anon_positions.end()) {
+ anon_pos->second.emplace_back(dependency_keys.size());
+ }
+
+ dependency_keys.emplace_back(
+ BuildMaps::Target::ConfiguredTarget{dep->Name(),
+ transitioned_config});
+ transition_keys.emplace_back(
+ BuildMaps::Target::ConfiguredTarget{
+ dep->Name(), Configuration{transition}});
+ }
+ }
+ params.emplace(target_field_name,
+ ExpressionPtr{std::move(dep_target_exps)});
+ }
+ for (auto const& [implicit_field_name, implicit_target] :
+ rule->ImplicitTargets()) {
+ auto anon_pos = anon_positions.find(implicit_field_name);
+ auto transitions = config_transitions[implicit_field_name]->List();
+ for (const auto& transition : transitions) {
+ auto transitioned_config = key.config.Update(transition);
+ for (const auto& dep : implicit_target) {
+ if (anon_pos != anon_positions.end()) {
+ anon_pos->second.emplace_back(dependency_keys.size());
+ }
+
+ dependency_keys.emplace_back(
+ BuildMaps::Target::ConfiguredTarget{dep,
+ transitioned_config});
+ transition_keys.emplace_back(
+ BuildMaps::Target::ConfiguredTarget{
+ dep, Configuration{transition}});
+ }
+ }
+ }
+ params.insert(rule->ImplicitTargetExps().begin(),
+ rule->ImplicitTargetExps().end());
+
+ (*subcaller)(
+ dependency_keys,
+ [transition_keys = std::move(transition_keys),
+ rule,
+ data,
+ key,
+ params = std::move(params),
+ setter,
+ logger,
+ result_map,
+ subcaller,
+ config_transitions = std::move(config_transitions),
+ anon_positions =
+ std::move(anon_positions)](auto const& values) mutable {
+ // Now that all non-anonymous targets have been evaluated we can
+ // read their provides map to construct and evaluate anonymous
+ // targets.
+ std::vector<BuildMaps::Target::ConfiguredTarget> anonymous_keys;
+ for (auto const& [name, def] : rule->AnonymousDefinitions()) {
+ Expression::list_t anon_names{};
+ for (auto pos : anon_positions.at(def.target)) {
+ auto const& provider_value =
+ (*values[pos])->Provides()->Map().Find(def.provider);
+ if (not provider_value) {
+ (*logger)(
+ fmt::format("Provider {} in {} does not exist",
+ def.provider,
+ def.target),
+ true);
+ return;
+ }
+ auto const& exprs = provider_value->get();
+ if (not exprs->IsList()) {
+ (*logger)(fmt::format("Provider {} in {} must be list "
+ "of target nodes but found: {}",
+ def.provider,
+ def.target,
+ exprs->ToString()),
+ true);
+ return;
+ }
+
+ auto const& list = exprs->List();
+ anon_names.reserve(anon_names.size() + list.size());
+ for (auto const& node : list) {
+ if (not node->IsNode()) {
+ (*logger)(
+ fmt::format("Entry in provider {} in {} must "
+ "be target node but found: {}",
+ def.provider,
+ def.target,
+ node->ToString()),
+ true);
+ return;
+ }
+ anon_names.emplace_back(BuildMaps::Base::EntityName{
+ BuildMaps::Base::AnonymousTarget{def.rule_map,
+ node}});
+ }
+ }
+
+ for (const auto& transition :
+ config_transitions.at(name)->List()) {
+ auto transitioned_config = key.config.Update(transition);
+ for (auto const& anon : anon_names) {
+ anonymous_keys.emplace_back(
+ BuildMaps::Target::ConfiguredTarget{
+ anon->Name(), transitioned_config});
+
+ transition_keys.emplace_back(
+ BuildMaps::Target::ConfiguredTarget{
+ anon->Name(), Configuration{transition}});
+ }
+ }
+
+ params.emplace(name, ExpressionPtr{std::move(anon_names)});
+ }
+ (*subcaller)(
+ anonymous_keys,
+ [dependency_values = values,
+ transition_keys = std::move(transition_keys),
+ rule,
+ data,
+ key,
+ params = std::move(params),
+ setter,
+ logger,
+ result_map](auto const& values) mutable {
+ // Join dependency values and anonymous values
+ dependency_values.insert(
+ dependency_values.end(), values.begin(), values.end());
+ withDependencies(transition_keys,
+ dependency_values,
+ rule,
+ data,
+ key,
+ params,
+ setter,
+ logger,
+ result_map);
+ },
+ logger);
+ },
+ logger);
+}
+
+void withTargetsFile(
+ const BuildMaps::Target::ConfiguredTarget& key,
+ const nlohmann::json& targets_file,
+ const gsl::not_null<BuildMaps::Base::SourceTargetMap*>& source_target,
+ const gsl::not_null<BuildMaps::Base::UserRuleMap*>& rule_map,
+ const gsl::not_null<TaskSystem*>& ts,
+ const BuildMaps::Target::TargetMap::SubCallerPtr& subcaller,
+ const BuildMaps::Target::TargetMap::SetterPtr& setter,
+ const BuildMaps::Target::TargetMap::LoggerPtr& logger,
+ const gsl::not_null<BuildMaps::Target::ResultTargetMap*> result_map) {
+ auto desc_it = targets_file.find(key.target.name);
+ if (desc_it == targets_file.end()) {
+ // Not a defined taraget, treat as source target
+ source_target->ConsumeAfterKeysReady(
+ ts,
+ {key.target},
+ [setter](auto values) { (*setter)(AnalysedTargetPtr{*values[0]}); },
+ [logger, target = key.target](auto const& msg, auto fatal) {
+ (*logger)(fmt::format("While analysing target {} as implicit "
+ "source target:\n{}",
+ target.ToString(),
+ msg),
+ fatal);
+ });
+ }
+ else {
+ nlohmann::json desc = *desc_it;
+ auto rule_it = desc.find("type");
+ if (rule_it == desc.end()) {
+ (*logger)(
+ fmt::format("No type specified in the definition of target {}",
+ key.target.ToString()),
+ true);
+ return;
+ }
+ // Handle built-in rule, if it is
+ auto handled_as_builtin = BuildMaps::Target::HandleBuiltin(
+ *rule_it, desc, key, subcaller, setter, logger, result_map);
+ if (handled_as_builtin) {
+ return;
+ }
+
+ // Not a built-in rule, so has to be a user rule
+ auto rule_name = BuildMaps::Base::ParseEntityNameFromJson(
+ *rule_it,
+ key.target,
+ [&logger, &rule_it, &key](std::string const& parse_err) {
+ (*logger)(fmt::format("Parsing rule name {} for target {} "
+ "failed with:\n{}",
+ rule_it->dump(),
+ key.target.ToString(),
+ parse_err),
+ true);
+ });
+ if (not rule_name) {
+ return;
+ }
+ auto desc_reader = BuildMaps::Base::FieldReader::CreatePtr(
+ desc,
+ key.target,
+ fmt::format("{} target", rule_name->ToString()),
+ logger);
+ if (not desc_reader) {
+ return;
+ }
+ rule_map->ConsumeAfterKeysReady(
+ ts,
+ {*rule_name},
+ [desc = std::move(desc_reader),
+ subcaller,
+ setter,
+ logger,
+ key,
+ result_map,
+ rn = *rule_name](auto values) {
+ auto data = TargetData::FromFieldReader(*values[0], desc);
+ if (not data) {
+ (*logger)(fmt::format("Failed to read data from target {} "
+ "with rule {}",
+ key.target.ToString(),
+ rn.ToString()),
+ /*fatal=*/true);
+ return;
+ }
+ withRuleDefinition(
+ *values[0],
+ data,
+ key,
+ subcaller,
+ setter,
+ std::make_shared<AsyncMapConsumerLogger>(
+ [logger, target = key.target, rn](auto const& msg,
+ auto fatal) {
+ (*logger)(
+ fmt::format("While analysing {} target {}:\n{}",
+ rn.ToString(),
+ target.ToString(),
+ msg),
+ fatal);
+ }),
+ result_map);
+ },
+ [logger, target = key.target](auto const& msg, auto fatal) {
+ (*logger)(fmt::format("While looking up rule for {}:\n{}",
+ target.ToString(),
+ msg),
+ fatal);
+ });
+ }
+}
+
+void withTargetNode(
+ const BuildMaps::Target::ConfiguredTarget& key,
+ const gsl::not_null<BuildMaps::Base::UserRuleMap*>& rule_map,
+ const gsl::not_null<TaskSystem*>& ts,
+ const BuildMaps::Target::TargetMap::SubCallerPtr& subcaller,
+ const BuildMaps::Target::TargetMap::SetterPtr& setter,
+ const BuildMaps::Target::TargetMap::LoggerPtr& logger,
+ const gsl::not_null<BuildMaps::Target::ResultTargetMap*> result_map) {
+ auto const& target_node = key.target.anonymous->target_node->Node();
+ auto const& rule_mapping = key.target.anonymous->rule_map->Map();
+ if (target_node.IsValue()) {
+ // fixed value node, create analysed target from result
+ auto const& val = target_node.GetValue();
+ (*setter)(std::make_shared<AnalysedTarget>(
+ AnalysedTarget{val->Result(), {}, {}, {}, {}, {}}));
+ }
+ else {
+ // abstract target node, lookup rule and instantiate target
+ auto const& abs = target_node.GetAbstract();
+ auto rule_name = rule_mapping.Find(abs.node_type);
+ if (not rule_name) {
+ (*logger)(fmt::format("Cannot resolve type of node {} via rule map "
+ "{}",
+ target_node.ToString(),
+ key.target.anonymous->rule_map->ToString()),
+ /*fatal=*/true);
+ }
+ rule_map->ConsumeAfterKeysReady(
+ ts,
+ {rule_name->get()->Name()},
+ [abs,
+ subcaller,
+ setter,
+ logger,
+ key,
+ result_map,
+ rn = rule_name->get()](auto values) {
+ auto data = TargetData::FromTargetNode(
+ *values[0], abs, key.target.anonymous->rule_map, logger);
+ if (not data) {
+ (*logger)(fmt::format("Failed to read data from target {} "
+ "with rule {}",
+ key.target.ToString(),
+ rn->ToString()),
+ /*fatal=*/true);
+ return;
+ }
+ withRuleDefinition(*values[0],
+ data,
+ key,
+ subcaller,
+ setter,
+ std::make_shared<AsyncMapConsumerLogger>(
+ [logger, target = key.target, rn](
+ auto const& msg, auto fatal) {
+ (*logger)(
+ fmt::format("While analysing {} "
+ "target {}:\n{}",
+ rn->ToString(),
+ target.ToString(),
+ msg),
+ fatal);
+ }),
+ result_map);
+ },
+ [logger, target = key.target](auto const& msg, auto fatal) {
+ (*logger)(fmt::format("While looking up rule for {}:\n{}",
+ target.ToString(),
+ msg),
+ fatal);
+ });
+ }
+}
+
+} // namespace
+
+namespace BuildMaps::Target {
+auto CreateTargetMap(
+ const gsl::not_null<BuildMaps::Base::SourceTargetMap*>& source_target_map,
+ const gsl::not_null<BuildMaps::Base::TargetsFileMap*>& targets_file_map,
+ const gsl::not_null<BuildMaps::Base::UserRuleMap*>& rule_map,
+ const gsl::not_null<ResultTargetMap*>& result_map,
+ std::size_t jobs) -> TargetMap {
+ auto target_reader =
+ [source_target_map, targets_file_map, rule_map, result_map](
+ auto ts, auto setter, auto logger, auto subcaller, auto key) {
+ if (key.target.explicit_file_reference) {
+ // Not a defined target, treat as source target
+ source_target_map->ConsumeAfterKeysReady(
+ ts,
+ {key.target},
+ [setter](auto values) {
+ (*setter)(AnalysedTargetPtr{*values[0]});
+ },
+ [logger, target = key.target](auto const& msg, auto fatal) {
+ (*logger)(fmt::format("While analysing target {} as "
+ "explicit source target:\n{}",
+ target.ToString(),
+ msg),
+ fatal);
+ });
+ }
+ else if (key.target.IsAnonymousTarget()) {
+ withTargetNode(
+ key, rule_map, ts, subcaller, setter, logger, result_map);
+ }
+ else {
+ targets_file_map->ConsumeAfterKeysReady(
+ ts,
+ {key.target.ToModule()},
+ [key,
+ source_target_map,
+ rule_map,
+ ts,
+ subcaller = std::move(subcaller),
+ setter = std::move(setter),
+ logger,
+ result_map](auto values) {
+ withTargetsFile(key,
+ *values[0],
+ source_target_map,
+ rule_map,
+ ts,
+ subcaller,
+ setter,
+ logger,
+ result_map);
+ },
+ [logger, target = key.target](auto const& msg, auto fatal) {
+ (*logger)(fmt::format("While searching targets "
+ "description for {}:\n{}",
+ target.ToString(),
+ msg),
+ fatal);
+ });
+ }
+ };
+ return AsyncMapConsumer<ConfiguredTarget, AnalysedTargetPtr>(target_reader,
+ jobs);
+}
+} // namespace BuildMaps::Target
diff --git a/src/buildtool/build_engine/target_map/target_map.hpp b/src/buildtool/build_engine/target_map/target_map.hpp
new file mode 100644
index 00000000..4befc842
--- /dev/null
+++ b/src/buildtool/build_engine/target_map/target_map.hpp
@@ -0,0 +1,27 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_TARGET_MAP_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_TARGET_MAP_HPP
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/build_engine/analysed_target/analysed_target.hpp"
+#include "src/buildtool/build_engine/base_maps/rule_map.hpp"
+#include "src/buildtool/build_engine/base_maps/source_map.hpp"
+#include "src/buildtool/build_engine/base_maps/targets_file_map.hpp"
+#include "src/buildtool/build_engine/target_map/configured_target.hpp"
+#include "src/buildtool/build_engine/target_map/result_map.hpp"
+#include "src/buildtool/multithreading/async_map_consumer.hpp"
+
+namespace BuildMaps::Target {
+
+using TargetMap = AsyncMapConsumer<ConfiguredTarget, AnalysedTargetPtr>;
+
+auto CreateTargetMap(const gsl::not_null<BuildMaps::Base::SourceTargetMap*>&,
+ const gsl::not_null<BuildMaps::Base::TargetsFileMap*>&,
+ const gsl::not_null<BuildMaps::Base::UserRuleMap*>&,
+ const gsl::not_null<ResultTargetMap*>&,
+ std::size_t jobs = 0) -> TargetMap;
+
+auto IsBuiltInRule(nlohmann::json const& rule_type) -> bool;
+
+} // namespace BuildMaps::Target
+
+#endif
diff --git a/src/buildtool/build_engine/target_map/utils.cpp b/src/buildtool/build_engine/target_map/utils.cpp
new file mode 100644
index 00000000..8c5353ce
--- /dev/null
+++ b/src/buildtool/build_engine/target_map/utils.cpp
@@ -0,0 +1,197 @@
+#include "src/buildtool/build_engine/target_map/utils.hpp"
+
+#include <algorithm>
+#include <filesystem>
+#include <vector>
+
+auto BuildMaps::Target::Utils::obtainTargetByName(
+ const SubExprEvaluator& eval,
+ const ExpressionPtr& expr,
+ const Configuration& env,
+ const Base::EntityName& current,
+ std::unordered_map<BuildMaps::Target::ConfiguredTarget,
+ AnalysedTargetPtr> const& deps_by_transition)
+ -> AnalysedTargetPtr {
+ auto const& empty_map_exp = Expression::kEmptyMapExpr;
+ auto reference = eval(expr["dep"], env);
+ std::string error{};
+ auto target = BuildMaps::Base::ParseEntityNameFromExpression(
+ reference, current, [&error](std::string const& parse_err) {
+ error = parse_err;
+ });
+ if (not target) {
+ throw Evaluator::EvaluationError{
+ fmt::format("Parsing target name {} failed with:\n{}",
+ reference->ToString(),
+ error)};
+ }
+ auto transition = eval(expr->Get("transition", empty_map_exp), env);
+ auto it = deps_by_transition.find(BuildMaps::Target::ConfiguredTarget{
+ *target, Configuration{transition}});
+ if (it == deps_by_transition.end()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "Reference to undeclared dependency {} in transition {}",
+ reference->ToString(),
+ transition->ToString())};
+ }
+ return it->second;
+}
+
+auto BuildMaps::Target::Utils::obtainTarget(
+ const SubExprEvaluator& eval,
+ const ExpressionPtr& expr,
+ const Configuration& env,
+ std::unordered_map<BuildMaps::Target::ConfiguredTarget,
+ AnalysedTargetPtr> const& deps_by_transition)
+ -> AnalysedTargetPtr {
+ auto const& empty_map_exp = Expression::kEmptyMapExpr;
+ auto reference = eval(expr["dep"], env);
+ if (not reference->IsName()) {
+ throw Evaluator::EvaluationError{
+ fmt::format("Not a target name: {}", reference->ToString())};
+ }
+ auto transition = eval(expr->Get("transition", empty_map_exp), env);
+ auto it = deps_by_transition.find(BuildMaps::Target::ConfiguredTarget{
+ reference->Name(), Configuration{transition}});
+ if (it == deps_by_transition.end()) {
+ throw Evaluator::EvaluationError{fmt::format(
+ "Reference to undeclared dependency {} in transition {}",
+ reference->ToString(),
+ transition->ToString())};
+ }
+ return it->second;
+}
+
+auto BuildMaps::Target::Utils::keys_expr(const ExpressionPtr& map)
+ -> ExpressionPtr {
+ auto const& m = map->Map();
+ auto result = Expression::list_t{};
+ result.reserve(m.size());
+ std::for_each(m.begin(), m.end(), [&](auto const& item) {
+ result.emplace_back(ExpressionPtr{item.first});
+ });
+ return ExpressionPtr{result};
+}
+
+auto BuildMaps::Target::Utils::tree_conflict(const ExpressionPtr& map)
+ -> std::optional<std::string> {
+ std::vector<std::filesystem::path> trees{};
+ for (auto const& [path, artifact] : map->Map()) {
+ if (artifact->Artifact().IsTree()) {
+ trees.emplace_back(std::filesystem::path{path});
+ }
+ }
+ if (trees.empty()) {
+ return std::nullopt;
+ }
+ for (auto const& [path, artifact] : map->Map()) {
+ auto p = std::filesystem::path{path};
+ for (auto const& treepath : trees) {
+ if (not artifact->Artifact().IsTree()) {
+ if (std::mismatch(treepath.begin(), treepath.end(), p.begin())
+ .first == treepath.end()) {
+ return path;
+ }
+ }
+ }
+ }
+ return std::nullopt;
+}
+
+auto BuildMaps::Target::Utils::getTainted(
+ std::set<std::string>* tainted,
+ const Configuration& config,
+ const ExpressionPtr& tainted_exp,
+ const BuildMaps::Target::TargetMap::LoggerPtr& logger) -> bool {
+ if (not tainted_exp) {
+ return false;
+ }
+ auto tainted_val =
+ tainted_exp.Evaluate(config, {}, [logger](auto const& msg) {
+ (*logger)(fmt::format("While evaluating tainted:\n{}", msg), true);
+ });
+ if (not tainted_val) {
+ return false;
+ }
+ if (not tainted_val->IsList()) {
+ (*logger)(fmt::format("tainted should evaluate to a list of strings, "
+ "but got {}",
+ tainted_val->ToString()),
+ true);
+ return false;
+ }
+ for (auto const& entry : tainted_val->List()) {
+ if (not entry->IsString()) {
+ (*logger)(fmt::format("tainted should evaluate to a list of "
+ "strings, but got {}",
+ tainted_val->ToString()),
+ true);
+ return false;
+ }
+ tainted->insert(entry->String());
+ }
+ return true;
+}
+
+namespace {
+auto hash_vector(std::vector<std::string> const& vec) -> std::string {
+ auto hasher = HashGenerator{BuildMaps::Target::Utils::kActionHash}
+ .IncrementalHasher();
+ for (auto const& s : vec) {
+ hasher.Update(HashGenerator{BuildMaps::Target::Utils::kActionHash}
+ .Run(s)
+ .Bytes());
+ }
+ auto digest = std::move(hasher).Finalize();
+ if (not digest) {
+ Logger::Log(LogLevel::Error, "Failed to finalize hash.");
+ std::terminate();
+ }
+ return digest->Bytes();
+}
+} // namespace
+
+auto BuildMaps::Target::Utils::createAction(
+ ActionDescription::outputs_t output_files,
+ ActionDescription::outputs_t output_dirs,
+ std::vector<std::string> command,
+ const ExpressionPtr& env,
+ std::optional<std::string> may_fail,
+ bool no_cache,
+ const ExpressionPtr& inputs_exp) -> ActionDescription {
+ auto hasher = HashGenerator{BuildMaps::Target::Utils::kActionHash}
+ .IncrementalHasher();
+ hasher.Update(hash_vector(output_files));
+ hasher.Update(hash_vector(output_dirs));
+ hasher.Update(hash_vector(command));
+ hasher.Update(env->ToHash());
+ hasher.Update(hash_vector(may_fail ? std::vector<std::string>{*may_fail}
+ : std::vector<std::string>{}));
+ hasher.Update(no_cache ? std::string{"N"} : std::string{"Y"});
+ hasher.Update(inputs_exp->ToHash());
+
+ auto digest = std::move(hasher).Finalize();
+ if (not digest) {
+ Logger::Log(LogLevel::Error, "Failed to finalize hash.");
+ std::terminate();
+ }
+ auto action_id = digest->HexString();
+
+ std::map<std::string, std::string> env_vars{};
+ for (auto const& [env_var, env_value] : env->Map()) {
+ env_vars.emplace(env_var, env_value->String());
+ }
+ ActionDescription::inputs_t inputs;
+ inputs.reserve(inputs_exp->Map().size());
+ for (auto const& [input_path, artifact] : inputs_exp->Map()) {
+ inputs.emplace(input_path, artifact->Artifact());
+ }
+ return ActionDescription{std::move(output_files),
+ std::move(output_dirs),
+ Action{std::move(action_id),
+ std::move(command),
+ std::move(env_vars),
+ std::move(may_fail),
+ no_cache},
+ std::move(inputs)};
+}
diff --git a/src/buildtool/build_engine/target_map/utils.hpp b/src/buildtool/build_engine/target_map/utils.hpp
new file mode 100644
index 00000000..e92e6281
--- /dev/null
+++ b/src/buildtool/build_engine/target_map/utils.hpp
@@ -0,0 +1,55 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_UTILS_HPP
+#define INCLUDED_SRC_BUILDTOOL_BUILD_ENGINE_TARGET_MAP_UTILS_HPP
+
+#include <optional>
+#include <unordered_map>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/build_engine/analysed_target/analysed_target.hpp"
+#include "src/buildtool/build_engine/base_maps/entity_name.hpp"
+#include "src/buildtool/build_engine/base_maps/field_reader.hpp"
+#include "src/buildtool/build_engine/expression/configuration.hpp"
+#include "src/buildtool/build_engine/expression/evaluator.hpp"
+#include "src/buildtool/build_engine/expression/function_map.hpp"
+#include "src/buildtool/build_engine/target_map/configured_target.hpp"
+#include "src/buildtool/build_engine/target_map/target_map.hpp"
+
+namespace BuildMaps::Target::Utils {
+
+constexpr HashGenerator::HashType kActionHash = HashGenerator::HashType::SHA256;
+
+auto obtainTargetByName(const SubExprEvaluator&,
+ const ExpressionPtr&,
+ const Configuration&,
+ const Base::EntityName&,
+ std::unordered_map<BuildMaps::Target::ConfiguredTarget,
+ AnalysedTargetPtr> const&)
+ -> AnalysedTargetPtr;
+
+auto obtainTarget(const SubExprEvaluator&,
+ const ExpressionPtr&,
+ const Configuration&,
+ std::unordered_map<BuildMaps::Target::ConfiguredTarget,
+ AnalysedTargetPtr> const&)
+ -> AnalysedTargetPtr;
+
+auto keys_expr(const ExpressionPtr& map) -> ExpressionPtr;
+
+auto tree_conflict(const ExpressionPtr & /* map */)
+ -> std::optional<std::string>;
+
+auto getTainted(std::set<std::string>* tainted,
+ const Configuration& config,
+ const ExpressionPtr& tainted_exp,
+ const BuildMaps::Target::TargetMap::LoggerPtr& logger) -> bool;
+
+auto createAction(ActionDescription::outputs_t output_files,
+ ActionDescription::outputs_t output_dirs,
+ std::vector<std::string> command,
+ const ExpressionPtr& env,
+ std::optional<std::string> may_fail,
+ bool no_cache,
+ const ExpressionPtr& inputs_exp) -> ActionDescription;
+
+} // namespace BuildMaps::Target::Utils
+#endif
diff --git a/src/buildtool/common/TARGETS b/src/buildtool/common/TARGETS
new file mode 100644
index 00000000..642ff0ff
--- /dev/null
+++ b/src/buildtool/common/TARGETS
@@ -0,0 +1,101 @@
+{ "cli":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["cli"]
+ , "hdrs": ["cli.hpp"]
+ , "deps":
+ [ ["src/buildtool/logging", "log_level"]
+ , ["@", "cli11", "", "cli11"]
+ , ["@", "json", "", "json"]
+ , ["@", "fmt", "", "fmt"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ ]
+ , "stage": ["src", "buildtool", "common"]
+ }
+, "bazel_types":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["bazel_types"]
+ , "hdrs": ["bazel_types.hpp"]
+ , "deps": [["@", "grpc", "", "grpc++"]]
+ , "proto": [["@", "bazel_remote_apis", "", "remote_execution_proto"]]
+ , "stage": ["src", "buildtool", "common"]
+ }
+, "common":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["common"]
+ , "hdrs":
+ [ "action.hpp"
+ , "artifact_digest.hpp"
+ , "artifact.hpp"
+ , "identifier.hpp"
+ , "statistics.hpp"
+ ]
+ , "deps":
+ [ "bazel_types"
+ , ["src/buildtool/crypto", "hash_generator"]
+ , ["src/buildtool/file_system", "object_type"]
+ , ["src/utils/cpp", "type_safe_arithmetic"]
+ , ["src/utils/cpp", "hash_combine"]
+ , ["@", "json", "", "json"]
+ ]
+ , "stage": ["src", "buildtool", "common"]
+ }
+, "artifact_factory":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["artifact_factory"]
+ , "hdrs": ["artifact_factory.hpp"]
+ , "deps":
+ [ "common"
+ , "artifact_description"
+ , "action_description"
+ , ["src/buildtool/logging", "logging"]
+ , ["src/utils/cpp", "type_safe_arithmetic"]
+ ]
+ , "stage": ["src", "buildtool", "common"]
+ }
+, "artifact_description":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["artifact_description"]
+ , "hdrs": ["artifact_description.hpp"]
+ , "deps":
+ [ "common"
+ , ["src/buildtool/file_system", "object_type"]
+ , ["src/buildtool/logging", "logging"]
+ , ["src/utils/cpp", "json"]
+ ]
+ , "stage": ["src", "buildtool", "common"]
+ }
+, "action_description":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["action_description"]
+ , "hdrs": ["action_description.hpp"]
+ , "deps":
+ [ "common"
+ , "artifact_description"
+ , ["src/buildtool/logging", "logging"]
+ , ["@", "json", "", "json"]
+ ]
+ , "stage": ["src", "buildtool", "common"]
+ }
+, "tree":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["tree"]
+ , "hdrs": ["tree.hpp"]
+ , "deps":
+ [ "common"
+ , "artifact_description"
+ , ["src/buildtool/logging", "logging"]
+ , ["@", "json", "", "json"]
+ ]
+ , "stage": ["src", "buildtool", "common"]
+ }
+, "config":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["config"]
+ , "hdrs": ["repository_config.hpp"]
+ , "deps":
+ [ ["src/buildtool/file_system", "file_root"]
+ , ["src/buildtool/file_system", "git_cas"]
+ ]
+ , "stage": ["src", "buildtool", "common"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/common/action.hpp b/src/buildtool/common/action.hpp
new file mode 100644
index 00000000..b547ecf3
--- /dev/null
+++ b/src/buildtool/common/action.hpp
@@ -0,0 +1,78 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_COMMON_ACTION_HPP
+#define INCLUDED_SRC_BUILDTOOL_COMMON_ACTION_HPP
+
+#include <map>
+#include <optional>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "src/buildtool/common/identifier.hpp"
+
+class Action {
+ public:
+ using LocalPath = std::string;
+
+ Action(std::string action_id,
+ std::vector<std::string> command,
+ std::map<std::string, std::string> env_vars,
+ std::optional<std::string> may_fail,
+ bool no_cache)
+ : id_{std::move(action_id)},
+ command_{std::move(command)},
+ env_{std::move(env_vars)},
+ may_fail_{std::move(may_fail)},
+ no_cache_{no_cache} {}
+
+ Action(std::string action_id,
+ std::vector<std::string> command,
+ std::map<std::string, std::string> env_vars)
+ : Action(std::move(action_id),
+ std::move(command),
+ std::move(env_vars),
+ std::nullopt,
+ false) {}
+
+ [[nodiscard]] auto Id() const noexcept -> ActionIdentifier { return id_; }
+
+ [[nodiscard]] auto Command() && noexcept -> std::vector<std::string> {
+ return std::move(command_);
+ }
+
+ [[nodiscard]] auto Command() const& noexcept
+ -> std::vector<std::string> const& {
+ return command_;
+ }
+
+ [[nodiscard]] auto Env() const& noexcept
+ -> std::map<std::string, std::string> {
+ return env_;
+ }
+
+ [[nodiscard]] auto Env() && noexcept -> std::map<std::string, std::string> {
+ return std::move(env_);
+ }
+
+ [[nodiscard]] auto IsTreeAction() const -> bool { return is_tree_; }
+ [[nodiscard]] auto MayFail() const -> std::optional<std::string> {
+ return may_fail_;
+ }
+ [[nodiscard]] auto NoCache() const -> bool { return no_cache_; }
+
+ [[nodiscard]] static auto CreateTreeAction(ActionIdentifier const& id)
+ -> Action {
+ return Action{id};
+ }
+
+ private:
+ ActionIdentifier id_{};
+ std::vector<std::string> command_{};
+ std::map<std::string, std::string> env_{};
+ bool is_tree_{};
+ std::optional<std::string> may_fail_{};
+ bool no_cache_{};
+
+ explicit Action(ActionIdentifier id) : id_{std::move(id)}, is_tree_{true} {}
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_COMMON_ACTION_HPP
diff --git a/src/buildtool/common/action_description.hpp b/src/buildtool/common/action_description.hpp
new file mode 100644
index 00000000..9b3469c1
--- /dev/null
+++ b/src/buildtool/common/action_description.hpp
@@ -0,0 +1,200 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_COMMON_ACTION_DESCRIPTION_HPP
+#define INCLUDED_SRC_BUILDTOOL_COMMON_ACTION_DESCRIPTION_HPP
+
+#include <map>
+#include <optional>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include "nlohmann/json.hpp"
+#include "src/buildtool/common/action.hpp"
+#include "src/buildtool/common/artifact_description.hpp"
+
+class ActionDescription {
+ public:
+ using outputs_t = std::vector<std::string>;
+ using inputs_t = std::unordered_map<std::string, ArtifactDescription>;
+
+ ActionDescription(outputs_t output_files,
+ outputs_t output_dirs,
+ Action action,
+ inputs_t inputs)
+ : output_files_{std::move(output_files)},
+ output_dirs_{std::move(output_dirs)},
+ action_{std::move(action)},
+ inputs_{std::move(inputs)} {}
+
+ [[nodiscard]] static auto FromJson(std::string const& id,
+ nlohmann::json const& desc) noexcept
+ -> std::optional<ActionDescription> {
+ try {
+ auto outputs =
+ ExtractValueAs<std::vector<std::string>>(desc, "output");
+ auto output_dirs =
+ ExtractValueAs<std::vector<std::string>>(desc, "output_dirs");
+ auto command =
+ ExtractValueAs<std::vector<std::string>>(desc, "command");
+
+ if ((not outputs.has_value() or outputs->empty()) and
+ (not output_dirs.has_value() or output_dirs->empty())) {
+ Logger::Log(
+ LogLevel::Error,
+ "Action description for action \"{}\" incomplete: values "
+ "for either \"output\" or \"output_dir\" must be non-empty "
+ "array.",
+ id);
+ return std::nullopt;
+ }
+
+ if (not command.has_value() or command->empty()) {
+ Logger::Log(
+ LogLevel::Error,
+ "Action description for action \"{}\" incomplete: values "
+ "for \"command\" must be non-empty array.",
+ id);
+ return std::nullopt;
+ }
+
+ if (not outputs) {
+ outputs = std::vector<std::string>{};
+ }
+ if (not output_dirs) {
+ output_dirs = std::vector<std::string>{};
+ }
+
+ auto optional_key_value_reader =
+ [](nlohmann::json const& action_desc,
+ std::string const& key) -> nlohmann::json {
+ auto it = action_desc.find(key);
+ if (it == action_desc.end()) {
+ return nlohmann::json::object();
+ }
+ return *it;
+ };
+ auto const input = optional_key_value_reader(desc, "input");
+ auto const env = optional_key_value_reader(desc, "env");
+
+ if (not(input.is_object() and env.is_object())) {
+ Logger::Log(
+ LogLevel::Error,
+ "Action description for action \"{}\" type error: values "
+ "for \"input\" and \"env\" must be objects.",
+ id);
+ return std::nullopt;
+ }
+
+ inputs_t inputs{};
+ for (auto const& [path, input_desc] : input.items()) {
+ auto artifact = ArtifactDescription::FromJson(input_desc);
+ if (not artifact) {
+ return std::nullopt;
+ }
+ inputs.emplace(path, std::move(*artifact));
+ }
+ std::optional<std::string> may_fail{};
+ bool no_cache{};
+ auto may_fail_it = desc.find("may_fail");
+ if (may_fail_it != desc.end()) {
+ if (not may_fail_it->is_string()) {
+ Logger::Log(LogLevel::Error,
+ "may_fail has to be a boolean");
+ return std::nullopt;
+ }
+ may_fail = *may_fail_it;
+ }
+ auto no_cache_it = desc.find("no_cache");
+ if (no_cache_it != desc.end()) {
+ if (not no_cache_it->is_boolean()) {
+ Logger::Log(LogLevel::Error,
+ "no_cache has to be a boolean");
+ return std::nullopt;
+ }
+ no_cache = *no_cache_it;
+ }
+
+ return ActionDescription{
+ std::move(*outputs),
+ std::move(*output_dirs),
+ Action{id, std::move(*command), env, may_fail, no_cache},
+ inputs};
+ } catch (std::exception const& ex) {
+ Logger::Log(
+ LogLevel::Error,
+ "Failed to parse action description from JSON with error:\n{}",
+ ex.what());
+ }
+ return std::nullopt;
+ }
+
+ [[nodiscard]] auto Id() const noexcept -> ActionIdentifier {
+ return action_.Id();
+ }
+
+ [[nodiscard]] auto ToJson() const noexcept -> nlohmann::json {
+ auto json = nlohmann::json{{"command", action_.Command()}};
+ if (not output_files_.empty()) {
+ json["output"] = output_files_;
+ }
+ if (not output_dirs_.empty()) {
+ json["output_dirs"] = output_dirs_;
+ }
+ if (not inputs_.empty()) {
+ auto inputs = nlohmann::json::object();
+ for (auto const& [path, artifact] : inputs_) {
+ inputs[path] = artifact.ToJson();
+ }
+ json["input"] = inputs;
+ }
+ if (not action_.Env().empty()) {
+ json["env"] = action_.Env();
+ }
+ if (action_.MayFail()) {
+ json["may_fail"] = *action_.MayFail();
+ }
+ if (action_.NoCache()) {
+ json["no_cache"] = true;
+ }
+ return json;
+ }
+
+ [[nodiscard]] auto OutputFiles() const& noexcept -> outputs_t const& {
+ return output_files_;
+ }
+
+ [[nodiscard]] auto OutputFiles() && noexcept -> outputs_t {
+ return std::move(output_files_);
+ }
+
+ [[nodiscard]] auto OutputDirs() const& noexcept -> outputs_t const& {
+ return output_dirs_;
+ }
+
+ [[nodiscard]] auto OutputDirs() && noexcept -> outputs_t {
+ return std::move(output_dirs_);
+ }
+
+ [[nodiscard]] auto GraphAction() const& noexcept -> Action const& {
+ return action_;
+ }
+
+ [[nodiscard]] auto GraphAction() && noexcept -> Action {
+ return std::move(action_);
+ }
+
+ [[nodiscard]] auto Inputs() const& noexcept -> inputs_t const& {
+ return inputs_;
+ }
+
+ [[nodiscard]] auto Inputs() && noexcept -> inputs_t {
+ return std::move(inputs_);
+ }
+
+ private:
+ outputs_t output_files_;
+ outputs_t output_dirs_;
+ Action action_;
+ inputs_t inputs_;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_COMMON_ACTION_DESCRIPTION_HPP
diff --git a/src/buildtool/common/artifact.hpp b/src/buildtool/common/artifact.hpp
new file mode 100644
index 00000000..6c97ab24
--- /dev/null
+++ b/src/buildtool/common/artifact.hpp
@@ -0,0 +1,214 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_COMMON_ARTIFACT_HPP
+#define INCLUDED_SRC_BUILDTOOL_COMMON_ARTIFACT_HPP
+
+#include <filesystem>
+#include <optional>
+#include <string>
+#include <unordered_map>
+#include <utility>
+
+#include "nlohmann/json.hpp"
+#include "src/buildtool/common/artifact_digest.hpp"
+#include "src/buildtool/common/identifier.hpp"
+#include "src/buildtool/file_system/object_type.hpp"
+#include "src/utils/cpp/hash_combine.hpp"
+
+// Artifacts (source files, libraries, executables...) need to store their
+// identifier
+class Artifact {
+ public:
+ struct ObjectInfo {
+ ArtifactDigest digest{};
+ ObjectType type{};
+ bool failed{};
+
+ [[nodiscard]] auto operator==(ObjectInfo const& other) const {
+ return (digest == other.digest and type == other.type and
+ failed == other.failed);
+ }
+
+ [[nodiscard]] auto operator!=(ObjectInfo const& other) const {
+ return not(*this == other);
+ }
+
+ // Create string of the form '[hash:size:type]'
+ [[nodiscard]] auto ToString() const noexcept -> std::string {
+ return fmt::format("[{}:{}:{}]{}",
+ digest.hash(),
+ digest.size(),
+ ToChar(type),
+ failed ? " FAILED" : "");
+ }
+
+ // Create JSON of the form '{"id": "hash", "size": x, "file_type": "f"}'
+ // As the failed property is only internal to a run, discard it.
+ [[nodiscard]] auto ToJson() const noexcept -> nlohmann::json {
+ return {{"id", digest.hash()},
+ {"size", digest.size()},
+ {"file_type", std::string{ToChar(type)}}};
+ }
+
+ [[nodiscard]] static auto FromString(std::string const& s) noexcept
+ -> std::optional<ObjectInfo> {
+ std::istringstream iss(s);
+ std::string id{};
+ std::string size_str{};
+ std::string type{};
+ if (not(iss.get() == '[') or not std::getline(iss, id, ':') or
+ not std::getline(iss, size_str, ':') or
+ not std::getline(iss, type, ']')) {
+ Logger::Log(LogLevel::Error,
+ "failed parsing object info from string.");
+ return std::nullopt;
+ }
+ try {
+ std::size_t size = std::stoul(size_str);
+ return ObjectInfo{ArtifactDigest{id, size},
+ FromChar(*type.c_str())};
+ } catch (std::out_of_range const& e) {
+ Logger::Log(LogLevel::Error,
+ "size raised out_of_range exception.");
+ } catch (std::invalid_argument const& e) {
+ Logger::Log(LogLevel::Error,
+ "size raised invalid_argument exception.");
+ }
+ return std::nullopt;
+ }
+
+ [[nodiscard]] static auto FromJson(nlohmann::json const& j)
+ -> std::optional<ObjectInfo> {
+ if (j.is_object() and j["id"].is_string() and
+ j["size"].is_number() and j["type"].is_string()) {
+ return ObjectInfo{
+ ArtifactDigest{j["id"].get<std::string>(),
+ j["size"].get<std::size_t>()},
+ FromChar(*(j["type"].get<std::string>().c_str()))};
+ }
+ return std::nullopt;
+ }
+ };
+
+ explicit Artifact(ArtifactIdentifier id) noexcept : id_{std::move(id)} {}
+
+ Artifact(Artifact const& other) noexcept
+ : id_{other.id_}, file_path_{other.file_path_}, repo_{other.repo_} {
+ object_info_ = other.object_info_;
+ }
+
+ Artifact(Artifact&&) noexcept = default;
+ ~Artifact() noexcept = default;
+ auto operator=(Artifact const&) noexcept -> Artifact& = delete;
+ auto operator=(Artifact&&) noexcept -> Artifact& = default;
+
+ [[nodiscard]] auto Id() const& noexcept -> ArtifactIdentifier const& {
+ return id_;
+ }
+
+ [[nodiscard]] auto Id() && noexcept -> ArtifactIdentifier {
+ return std::move(id_);
+ }
+
+ [[nodiscard]] auto FilePath() const noexcept
+ -> std::optional<std::filesystem::path> {
+ return file_path_;
+ }
+
+ [[nodiscard]] auto Repository() const noexcept -> std::string {
+ return repo_;
+ }
+
+ [[nodiscard]] auto Digest() const noexcept
+ -> std::optional<ArtifactDigest> {
+ return object_info_ ? std::optional{object_info_->digest}
+ : std::nullopt;
+ }
+
+ [[nodiscard]] auto Type() const noexcept -> std::optional<ObjectType> {
+ return object_info_ ? std::optional{object_info_->type} : std::nullopt;
+ }
+
+ [[nodiscard]] auto Info() const& noexcept
+ -> std::optional<ObjectInfo> const& {
+ return object_info_;
+ }
+ [[nodiscard]] auto Info() && noexcept -> std::optional<ObjectInfo> {
+ return std::move(object_info_);
+ }
+
+ void SetObjectInfo(ObjectInfo const& object_info,
+ bool fail_info) const noexcept {
+ if (fail_info) {
+ object_info_ =
+ ObjectInfo{object_info.digest, object_info.type, true};
+ }
+ else {
+ object_info_ = object_info;
+ }
+ }
+
+ void SetObjectInfo(ArtifactDigest const& digest,
+ ObjectType type,
+ bool failed) const noexcept {
+ object_info_ = ObjectInfo{digest, type, failed};
+ }
+
+ [[nodiscard]] static auto CreateLocalArtifact(
+ std::string const& id,
+ std::filesystem::path const& file_path,
+ std::string const& repo) noexcept -> Artifact {
+ return Artifact{id, file_path, repo};
+ }
+
+ [[nodiscard]] static auto CreateKnownArtifact(
+ std::string const& id,
+ std::string const& hash,
+ std::size_t size,
+ ObjectType type,
+ std::optional<std::string> const& repo) noexcept -> Artifact {
+ return Artifact{id, {hash, size}, type, false, repo};
+ }
+
+ [[nodiscard]] static auto CreateActionArtifact(
+ std::string const& id) noexcept -> Artifact {
+ return Artifact{id};
+ }
+
+ private:
+ ArtifactIdentifier id_{};
+ std::optional<std::filesystem::path> file_path_{};
+ std::string repo_{};
+ mutable std::optional<ObjectInfo> object_info_{};
+
+ Artifact(ArtifactIdentifier id,
+ std::filesystem::path const& file_path,
+ std::string repo) noexcept
+ : id_{std::move(id)}, file_path_{file_path}, repo_{std::move(repo)} {}
+
+ Artifact(ArtifactIdentifier id,
+ ArtifactDigest const& digest,
+ ObjectType type,
+ bool failed,
+ std::optional<std::string> repo) noexcept
+ : id_{std::move(id)} {
+ SetObjectInfo(digest, type, failed);
+ if (repo) {
+ repo_ = std::move(*repo);
+ }
+ }
+};
+
+namespace std {
+template <>
+struct hash<Artifact::ObjectInfo> {
+ [[nodiscard]] auto operator()(
+ Artifact::ObjectInfo const& info) const noexcept -> std::size_t {
+ std::size_t seed{};
+ hash_combine(&seed, info.digest);
+ hash_combine(&seed, info.type);
+ hash_combine(&seed, info.failed);
+ return seed;
+ }
+};
+} // namespace std
+
+#endif // INCLUDED_SRC_BUILDTOOL_COMMON_ARTIFACT_HPP
diff --git a/src/buildtool/common/artifact_description.hpp b/src/buildtool/common/artifact_description.hpp
new file mode 100644
index 00000000..3820edc4
--- /dev/null
+++ b/src/buildtool/common/artifact_description.hpp
@@ -0,0 +1,316 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_COMMON_ARTIFACT_DESCRIPTION_HPP
+#define INCLUDED_SRC_BUILDTOOL_COMMON_ARTIFACT_DESCRIPTION_HPP
+
+#include <filesystem>
+#include <optional>
+#include <string>
+#include <variant>
+
+#include "src/buildtool/common/artifact.hpp"
+#include "src/buildtool/common/artifact_digest.hpp"
+#include "src/buildtool/file_system/object_type.hpp"
+#include "src/buildtool/logging/logger.hpp"
+#include "src/utils/cpp/json.hpp"
+
+class ArtifactDescription {
+ using Local = std::pair<std::filesystem::path, std::string>;
+ using Known =
+ std::tuple<ArtifactDigest, ObjectType, std::optional<std::string>>;
+ using Action = std::pair<std::string, std::filesystem::path>;
+ using Tree = std::string;
+
+ public:
+ explicit ArtifactDescription(std::filesystem::path path,
+ std::string repository) noexcept
+ : data_{std::make_pair(std::move(path), std::move(repository))} {}
+
+ ArtifactDescription(ArtifactDigest digest,
+ ObjectType file_type,
+ std::optional<std::string> repo = std::nullopt) noexcept
+ : data_{
+ std::make_tuple(std::move(digest), file_type, std::move(repo))} {}
+
+ ArtifactDescription(std::string action_id,
+ std::filesystem::path path) noexcept
+ : data_{std::make_pair(std::move(action_id), std::move(path))} {}
+
+ explicit ArtifactDescription(std::string tree_id) noexcept
+ : data_{std::move(tree_id)} {}
+
+ [[nodiscard]] static auto FromJson(nlohmann::json const& json) noexcept
+ -> std::optional<ArtifactDescription> {
+ try {
+ auto const type = ExtractValueAs<std::string>(
+ json, "type", [](std::string const& error) {
+ Logger::Log(
+ LogLevel::Error,
+ "{}\ncan not retrieve value for \"type\" from artifact "
+ "description.",
+ error);
+ });
+ auto const data = ExtractValueAs<nlohmann::json>(
+ json, "data", [](std::string const& error) {
+ Logger::Log(
+ LogLevel::Error,
+ "{}\ncan not retrieve value for \"data\" from artifact "
+ "description.",
+ error);
+ });
+
+ if (not(type and data)) {
+ return std::nullopt;
+ }
+
+ if (*type == "LOCAL") {
+ return CreateLocalArtifactDescription(*data);
+ }
+ if (*type == "KNOWN") {
+ return CreateKnownArtifactDescription(*data);
+ }
+ if (*type == "ACTION") {
+ return CreateActionArtifactDescription(*data);
+ }
+ if (*type == "TREE") {
+ return CreateTreeArtifactDescription(*data);
+ }
+ Logger::Log(LogLevel::Error,
+ R"(artifact type must be one of "LOCAL", "KNOWN",
+ "ACTION", or "TREE")");
+ } catch (std::exception const& ex) {
+ Logger::Log(LogLevel::Error,
+ "Failed to parse artifact description from JSON with "
+ "error:\n{}",
+ ex.what());
+ }
+ return std::nullopt;
+ }
+
+ [[nodiscard]] auto Id() const noexcept -> ArtifactIdentifier { return id_; }
+
+ [[nodiscard]] auto IsTree() const noexcept -> bool {
+ return std::holds_alternative<Tree>(data_);
+ }
+
+ [[nodiscard]] auto ToJson() const noexcept -> nlohmann::json {
+ try {
+ if (std::holds_alternative<Local>(data_)) {
+ auto const& [path, repo] = std::get<Local>(data_);
+ return DescribeLocalArtifact(path.string(), repo);
+ }
+ if (std::holds_alternative<Known>(data_)) {
+ auto const& [digest, file_type, _] = std::get<Known>(data_);
+ return DescribeKnownArtifact(
+ digest.hash(), digest.size(), file_type);
+ }
+ if (std::holds_alternative<Action>(data_)) {
+ auto const& [action_id, path] = std::get<Action>(data_);
+ return DescribeActionArtifact(action_id, path);
+ }
+ if (std::holds_alternative<Tree>(data_)) {
+ return DescribeTreeArtifact(std::get<Tree>(data_));
+ }
+ Logger::Log(LogLevel::Error,
+ "Internal error, unknown artifact type");
+ } catch (std::exception const& ex) {
+ Logger::Log(LogLevel::Error,
+ "Serializing to JSON failed with error:\n{}",
+ ex.what());
+ }
+ gsl_Ensures(false); // unreachable
+ return {};
+ }
+
+ [[nodiscard]] auto ToArtifact() const noexcept -> Artifact {
+ try {
+ if (std::holds_alternative<Local>(data_)) {
+ auto const& [path, repo] = std::get<Local>(data_);
+ return Artifact::CreateLocalArtifact(id_, path.string(), repo);
+ }
+ if (std::holds_alternative<Known>(data_)) {
+ auto const& [digest, file_type, repo] = std::get<Known>(data_);
+ return Artifact::CreateKnownArtifact(
+ id_, digest.hash(), digest.size(), file_type, repo);
+ }
+ if (std::holds_alternative<Action>(data_) or
+ std::holds_alternative<Tree>(data_)) {
+ return Artifact::CreateActionArtifact(id_);
+ }
+ Logger::Log(LogLevel::Error,
+ "Internal error, unknown artifact type");
+ } catch (std::exception const& ex) {
+ Logger::Log(LogLevel::Error,
+ "Creating artifact failed with error:\n{}",
+ ex.what());
+ }
+ gsl_Ensures(false); // unreachable
+ return Artifact{{}};
+ }
+
+ [[nodiscard]] auto ToString(int indent = 0) const noexcept -> std::string {
+ try {
+ return ToJson().dump(indent);
+ } catch (std::exception const& ex) {
+ Logger::Log(LogLevel::Error,
+ "Serializing artifact failed with error:\n{}",
+ ex.what());
+ }
+ return {};
+ }
+
+ [[nodiscard]] auto operator==(
+ ArtifactDescription const& other) const noexcept -> bool {
+ return data_ == other.data_;
+ }
+
+ [[nodiscard]] auto operator!=(
+ ArtifactDescription const& other) const noexcept -> bool {
+ return not(*this == other);
+ }
+
+ private:
+ inline static HashGenerator const hash_gen_{
+ HashGenerator::HashType::SHA256};
+ std::variant<Local, Known, Action, Tree> data_;
+ ArtifactIdentifier id_{ComputeId(ToJson())};
+
+ [[nodiscard]] static auto ComputeId(nlohmann::json const& desc) noexcept
+ -> ArtifactIdentifier {
+ try {
+ return hash_gen_.Run(desc.dump()).Bytes();
+ } catch (std::exception const& ex) {
+ Logger::Log(LogLevel::Error,
+ "Computing artifact id failed with error:\n{}",
+ ex.what());
+ }
+ return {};
+ }
+
+ [[nodiscard]] static auto DescribeLocalArtifact(
+ std::filesystem::path const& src_path,
+ std::string const& repository) noexcept -> nlohmann::json {
+ return {{"type", "LOCAL"},
+ {"data",
+ {{"path", src_path.string()}, {"repository", repository}}}};
+ }
+
+ [[nodiscard]] static auto DescribeKnownArtifact(
+ std::string const& blob_id,
+ std::size_t size,
+ ObjectType type = ObjectType::File) noexcept -> nlohmann::json {
+ std::string const typestr{ToChar(type)};
+ return {{"type", "KNOWN"},
+ {"data",
+ {{"id", blob_id}, {"size", size}, {"file_type", typestr}}}};
+ }
+
+ [[nodiscard]] static auto DescribeActionArtifact(
+ std::string const& action_id,
+ std::string const& out_path) noexcept -> nlohmann::json {
+ return {{"type", "ACTION"},
+ {"data", {{"id", action_id}, {"path", out_path}}}};
+ }
+
+ [[nodiscard]] static auto DescribeTreeArtifact(
+ std::string const& tree_id) noexcept -> nlohmann::json {
+ return {{"type", "TREE"}, {"data", {{"id", tree_id}}}};
+ }
+
+ [[nodiscard]] static auto CreateLocalArtifactDescription(
+ nlohmann::json const& data) -> std::optional<ArtifactDescription> {
+
+ auto const path = ExtractValueAs<std::string>(
+ data, "path", [](std::string const& error) {
+ Logger::Log(LogLevel::Error,
+ "{}\ncan not retrieve value for \"path\" from "
+ "LOCAL artifact's data.",
+ error);
+ });
+ auto const repository = ExtractValueAs<std::string>(
+ data, "repository", [](std::string const& error) {
+ Logger::Log(LogLevel::Error,
+ "{}\ncan not retrieve value for \"path\" from "
+ "LOCAL artifact's data.",
+ error);
+ });
+ if (path.has_value() and repository.has_value()) {
+ return ArtifactDescription{std::filesystem::path{*path},
+ *repository};
+ }
+ return std::nullopt;
+ }
+
+ [[nodiscard]] static auto CreateKnownArtifactDescription(
+ nlohmann::json const& data) -> std::optional<ArtifactDescription> {
+
+ auto const blob_id = ExtractValueAs<std::string>(
+ data, "id", [](std::string const& error) {
+ Logger::Log(LogLevel::Error,
+ "{}\ncan not retrieve value for \"id\" from "
+ "KNOWN artifact's data.",
+ error);
+ });
+ auto const size = ExtractValueAs<std::size_t>(
+ data, "size", [](std::string const& error) {
+ Logger::Log(LogLevel::Error,
+ "{}\ncan not retrieve value for \"size\" from "
+ "KNOWN artifact's data.",
+ error);
+ });
+ auto const file_type = ExtractValueAs<std::string>(
+ data, "file_type", [](std::string const& error) {
+ Logger::Log(LogLevel::Error,
+ "{}\ncan not retrieve value for \"file_type\" from "
+ "KNOWN artifact's data.",
+ error);
+ });
+ if (blob_id.has_value() and size.has_value() and
+ file_type.has_value() and file_type->size() == 1) {
+ return ArtifactDescription{ArtifactDigest{*blob_id, *size},
+ FromChar((*file_type)[0])};
+ }
+ return std::nullopt;
+ }
+
+ [[nodiscard]] static auto CreateActionArtifactDescription(
+ nlohmann::json const& data) -> std::optional<ArtifactDescription> {
+
+ auto const action_id = ExtractValueAs<std::string>(
+ data, "id", [](std::string const& error) {
+ Logger::Log(LogLevel::Error,
+ "{}\ncan not retrieve value for \"id\" from "
+ "ACTION artifact's data.",
+ error);
+ });
+
+ auto const path = ExtractValueAs<std::string>(
+ data, "path", [](std::string const& error) {
+ Logger::Log(LogLevel::Error,
+ "{}\ncan not retrieve value for \"path\" from "
+ "ACTION artifact's data.",
+ error);
+ });
+ if (action_id.has_value() and path.has_value()) {
+ return ArtifactDescription{*action_id,
+ std::filesystem::path{*path}};
+ }
+ return std::nullopt;
+ }
+
+ [[nodiscard]] static auto CreateTreeArtifactDescription(
+ nlohmann::json const& data) -> std::optional<ArtifactDescription> {
+ auto const tree_id = ExtractValueAs<std::string>(
+ data, "id", [](std::string const& error) {
+ Logger::Log(LogLevel::Error,
+ "{}\ncan not retrieve value for \"id\" from "
+ "TREE artifact's data.",
+ error);
+ });
+
+ if (tree_id.has_value()) {
+ return ArtifactDescription{*tree_id};
+ }
+ return std::nullopt;
+ }
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_COMMON_ARTIFACT_DESCRIPTION_HPP
diff --git a/src/buildtool/common/artifact_digest.hpp b/src/buildtool/common/artifact_digest.hpp
new file mode 100644
index 00000000..7499e975
--- /dev/null
+++ b/src/buildtool/common/artifact_digest.hpp
@@ -0,0 +1,74 @@
+#ifndef INCLUDED_SRC_COMMON_ARTIFACT_DIGEST_HPP
+#define INCLUDED_SRC_COMMON_ARTIFACT_DIGEST_HPP
+
+#include <optional>
+#include <string>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/common/bazel_types.hpp"
+#include "src/buildtool/crypto/hash_generator.hpp"
+#include "src/utils/cpp/hash_combine.hpp"
+
+// Wrapper for bazel_re::Digest. Can be implicitly cast to bazel_re::Digest.
+// Provides getter for size with convenient non-protobuf type.
+class ArtifactDigest {
+ public:
+ ArtifactDigest() noexcept = default;
+ explicit ArtifactDigest(bazel_re::Digest digest) noexcept
+ : size_{gsl::narrow<std::size_t>(digest.size_bytes())},
+ digest_{std::move(digest)} {}
+ ArtifactDigest(std::string hash, std::size_t size) noexcept
+ : size_{size}, digest_{CreateBazelDigest(std::move(hash), size)} {}
+
+ [[nodiscard]] auto hash() const& noexcept -> std::string const& {
+ return digest_.hash();
+ }
+
+ [[nodiscard]] auto hash() && noexcept -> std::string {
+ return std::move(*digest_.mutable_hash());
+ }
+
+ [[nodiscard]] auto size() const noexcept -> std::size_t { return size_; }
+
+ // NOLINTNEXTLINE allow implicit casts
+ [[nodiscard]] operator bazel_re::Digest const &() const& { return digest_; }
+ // NOLINTNEXTLINE allow implicit casts
+ [[nodiscard]] operator bazel_re::Digest() && { return std::move(digest_); }
+
+ [[nodiscard]] auto operator==(ArtifactDigest const& other) const -> bool {
+ return std::equal_to<bazel_re::Digest>{}(*this, other);
+ }
+
+ [[nodiscard]] static auto Create(std::string const& content) noexcept
+ -> ArtifactDigest {
+ return ArtifactDigest{ComputeHash(content), content.size()};
+ }
+
+ private:
+ std::size_t size_{};
+ bazel_re::Digest digest_{};
+
+ [[nodiscard]] static auto CreateBazelDigest(std::string&& hash,
+ std::size_t size)
+ -> bazel_re::Digest {
+ bazel_re::Digest d;
+ d.set_hash(std::move(hash));
+ d.set_size_bytes(gsl::narrow<google::protobuf::int64>(size));
+ return d;
+ }
+};
+
+namespace std {
+template <>
+struct hash<ArtifactDigest> {
+ [[nodiscard]] auto operator()(ArtifactDigest const& digest) const noexcept
+ -> std::size_t {
+ std::size_t seed{};
+ hash_combine(&seed, digest.hash());
+ hash_combine(&seed, digest.size());
+ return seed;
+ }
+};
+} // namespace std
+
+#endif // INCLUDED_SRC_COMMON_ARTIFACT_DIGEST_HPP
diff --git a/src/buildtool/common/artifact_factory.hpp b/src/buildtool/common/artifact_factory.hpp
new file mode 100644
index 00000000..e3ef2d0c
--- /dev/null
+++ b/src/buildtool/common/artifact_factory.hpp
@@ -0,0 +1,91 @@
+#ifndef INCLUDED_SRC_COMMON_ARTIFACT_FACTORY_HPP
+#define INCLUDED_SRC_COMMON_ARTIFACT_FACTORY_HPP
+
+#include <algorithm>
+#include <optional>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "src/buildtool/common/action_description.hpp"
+#include "src/buildtool/common/artifact.hpp"
+#include "src/buildtool/common/artifact_description.hpp"
+#include "src/buildtool/common/identifier.hpp"
+#include "src/buildtool/file_system/object_type.hpp"
+#include "src/buildtool/logging/logger.hpp"
+#include "src/utils/cpp/json.hpp"
+
+class ArtifactFactory {
+ public:
+ [[nodiscard]] static auto Identifier(nlohmann::json const& description)
+ -> ArtifactIdentifier {
+ auto desc = ArtifactDescription::FromJson(description);
+ if (desc) {
+ return desc->Id();
+ }
+ return {};
+ }
+
+ [[nodiscard]] static auto FromDescription(nlohmann::json const& description)
+ -> std::optional<Artifact> {
+ auto desc = ArtifactDescription::FromJson(description);
+ if (desc) {
+ return desc->ToArtifact();
+ }
+ return std::nullopt;
+ }
+
+ [[nodiscard]] static auto DescribeLocalArtifact(
+ std::filesystem::path const& src_path,
+ std::string repository) noexcept -> nlohmann::json {
+ return ArtifactDescription{src_path, std::move(repository)}.ToJson();
+ }
+
+ [[nodiscard]] static auto DescribeKnownArtifact(
+ std::string const& blob_id,
+ std::size_t size,
+ ObjectType type = ObjectType::File) noexcept -> nlohmann::json {
+ return ArtifactDescription{ArtifactDigest{blob_id, size}, type}
+ .ToJson();
+ }
+
+ [[nodiscard]] static auto DescribeActionArtifact(
+ std::string const& action_id,
+ std::string const& out_path) noexcept -> nlohmann::json {
+ return ArtifactDescription{action_id, std::filesystem::path{out_path}}
+ .ToJson();
+ }
+
+ [[nodiscard]] static auto DescribeTreeArtifact(
+ std::string const& tree_id) noexcept -> nlohmann::json {
+ return ArtifactDescription{tree_id}.ToJson();
+ }
+
+ [[nodiscard]] static auto DescribeAction(
+ std::vector<std::string> const& output_files,
+ std::vector<std::string> const& output_dirs,
+ std::vector<std::string> const& command) noexcept -> nlohmann::json {
+ return ActionDescription{
+ output_files, output_dirs, Action{"unused", command, {}}, {}}
+ .ToJson();
+ }
+
+ [[nodiscard]] static auto DescribeAction(
+ std::vector<std::string> const& output_files,
+ std::vector<std::string> const& output_dirs,
+ std::vector<std::string> const& command,
+ ActionDescription::inputs_t const& input,
+ std::map<std::string, std::string> const& env) noexcept
+ -> nlohmann::json {
+ return ActionDescription{
+ output_files, output_dirs, Action{"unused", command, env}, input}
+ .ToJson();
+ }
+
+ [[nodiscard]] static auto IsLocal(nlohmann::json const& description)
+ -> bool {
+ return description.at("type") == "LOCAL";
+ }
+}; // class ArtifactFactory
+
+#endif // INCLUDED_SRC_COMMON_ARTIFACT_FACTORY_HPP
diff --git a/src/buildtool/common/bazel_types.hpp b/src/buildtool/common/bazel_types.hpp
new file mode 100644
index 00000000..b7d64409
--- /dev/null
+++ b/src/buildtool/common/bazel_types.hpp
@@ -0,0 +1,86 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_COMMON_BAZEL_TYPES_HPP
+#define INCLUDED_SRC_BUILDTOOL_COMMON_BAZEL_TYPES_HPP
+
+/// \file bazel_types.hpp
+/// \brief This file contains commonly used aliases for Bazel API
+/// Never include this file in any other header!
+
+#ifdef BOOTSTRAP_BUILD_TOOL
+
+namespace build::bazel::remote::execution::v2 {
+struct Digest {
+ std::string hash_;
+ int64_t size_bytes_;
+
+ auto hash() const& noexcept -> std::string const& { return hash_; }
+
+ auto size_bytes() const noexcept -> int64_t { return size_bytes_; }
+
+ void set_size_bytes(int64_t size_bytes) { size_bytes_ = size_bytes; }
+
+ void set_hash(std::string hash) { hash_ = hash; }
+
+ std::string* mutable_hash() { return &hash_; }
+};
+} // namespace build::bazel::remote::execution::v2
+
+namespace google::protobuf {
+using int64 = int64_t;
+}
+
+#else
+
+#include "build/bazel/remote/execution/v2/remote_execution.grpc.pb.h"
+
+#endif
+
+/// \brief Alias namespace for bazel remote execution
+// NOLINTNEXTLINE(misc-unused-alias-decls)
+namespace bazel_re = build::bazel::remote::execution::v2;
+
+#ifdef BOOTSTRAP_BUILD_TOOL
+// not using protobuffers
+#else
+
+/// \brief Alias namespace for 'google::protobuf'
+namespace pb {
+// NOLINTNEXTLINE(google-build-using-namespace)
+using namespace google::protobuf;
+
+/// \brief Alias function for 'RepeatedFieldBackInserter'
+template <typename T>
+auto back_inserter(RepeatedField<T>* const f) {
+ return RepeatedFieldBackInserter(f);
+}
+
+/// \brief Alias function for 'RepeatedPtrFieldBackInserter'
+template <typename T>
+auto back_inserter(RepeatedPtrField<T>* const f) {
+ return RepeatedPtrFieldBackInserter(f);
+}
+
+} // namespace pb
+#endif
+
+namespace std {
+
+/// \brief Hash function to support bazel_re::Digest as std::map* key.
+template <>
+struct hash<bazel_re::Digest> {
+ auto operator()(bazel_re::Digest const& d) const noexcept -> std::size_t {
+ return std::hash<std::string>{}(d.hash());
+ }
+};
+
+/// \brief Equality function to support bazel_re::Digest as std::map* key.
+template <>
+struct equal_to<bazel_re::Digest> {
+ auto operator()(bazel_re::Digest const& lhs,
+ bazel_re::Digest const& rhs) const noexcept -> bool {
+ return lhs.hash() == rhs.hash();
+ }
+};
+
+} // namespace std
+
+#endif // INCLUDED_SRC_BUILDTOOL_COMMON_BAZEL_TYPES_HPP
diff --git a/src/buildtool/common/cli.hpp b/src/buildtool/common/cli.hpp
new file mode 100644
index 00000000..b4710147
--- /dev/null
+++ b/src/buildtool/common/cli.hpp
@@ -0,0 +1,365 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_COMMON_CLI_HPP
+#define INCLUDED_SRC_BUILDTOOL_COMMON_CLI_HPP
+
+#include <cstdlib>
+#include <filesystem>
+#include <string>
+#include <thread>
+#include <vector>
+
+#include "CLI/CLI.hpp"
+#include "fmt/core.h"
+#include "nlohmann/json.hpp"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/logging/log_level.hpp"
+
+constexpr auto kDefaultLogLevel = LogLevel::Info;
+
+/// \brief Arguments common to all commands.
+struct CommonArguments {
+ std::optional<std::filesystem::path> workspace_root{};
+ std::optional<std::filesystem::path> repository_config{};
+ std::optional<std::string> main{};
+ std::optional<std::filesystem::path> log_file{};
+ std::size_t jobs{std::max(1U, std::thread::hardware_concurrency())};
+ LogLevel log_limit{kDefaultLogLevel};
+};
+
+/// \brief Arguments required for analysing targets.
+struct AnalysisArguments {
+ std::filesystem::path config_file{};
+ std::optional<nlohmann::json> target{};
+ std::optional<std::string> target_file_name{};
+ std::optional<std::string> rule_file_name{};
+ std::optional<std::string> expression_file_name{};
+ std::optional<std::filesystem::path> target_root{};
+ std::optional<std::filesystem::path> rule_root{};
+ std::optional<std::filesystem::path> expression_root{};
+ std::optional<std::filesystem::path> graph_file{};
+ std::optional<std::filesystem::path> artifacts_to_build_file{};
+};
+
+/// \brief Arguments required for running diagnostics.
+struct DiagnosticArguments {
+ std::optional<std::string> dump_actions{std::nullopt};
+ std::optional<std::string> dump_blobs{std::nullopt};
+ std::optional<std::string> dump_trees{std::nullopt};
+ std::optional<std::string> dump_targets{std::nullopt};
+ std::optional<std::string> dump_anonymous{std::nullopt};
+ std::optional<std::string> dump_nodes{std::nullopt};
+};
+
+/// \brief Arguments required for specifying cache/build endpoint.
+struct EndpointArguments {
+ std::optional<std::filesystem::path> local_root{};
+ std::optional<std::string> remote_execution_address;
+};
+
+/// \brief Arguments required for building.
+struct BuildArguments {
+ std::optional<std::vector<std::string>> local_launcher{std::nullopt};
+ std::map<std::string, std::string> platform_properties;
+ std::size_t build_jobs{};
+ std::optional<std::string> dump_artifacts{std::nullopt};
+ std::optional<std::string> print_to_stdout{std::nullopt};
+ bool persistent_build_dir{false};
+ bool show_runfiles{false};
+};
+
+/// \brief Arguments required for staging.
+struct StageArguments {
+ std::filesystem::path output_dir{};
+};
+
+/// \brief Arguments required for rebuilding.
+struct RebuildArguments {
+ std::optional<std::string> cache_endpoint{};
+ std::optional<std::filesystem::path> dump_flaky{};
+};
+
+/// \brief Arguments for fetching artifacts from CAS.
+struct FetchArguments {
+ std::string object_id{};
+ std::optional<std::filesystem::path> output_path{};
+};
+
+/// \brief Arguments required for running from graph file.
+struct GraphArguments {
+ nlohmann::json artifacts{};
+ std::filesystem::path graph_file{};
+ std::optional<std::filesystem::path> git_cas{};
+};
+
+static inline auto SetupCommonArguments(
+ gsl::not_null<CLI::App*> const& app,
+ gsl::not_null<CommonArguments*> const& clargs) {
+ app->add_option("-C,--repository_config",
+ clargs->repository_config,
+ "Path to configuration file for multi-repository builds.")
+ ->type_name("PATH");
+ app->add_option(
+ "--main", clargs->main, "The repository to take the target from.")
+ ->type_name("NAME");
+ app->add_option_function<std::string>(
+ "-w,--workspace_root",
+ [clargs](auto const& workspace_root_raw) {
+ clargs->workspace_root = std::filesystem::canonical(
+ std::filesystem::absolute(workspace_root_raw));
+ },
+ "Path of the workspace's root directory.")
+ ->type_name("PATH");
+ app->add_option("-j,--jobs",
+ clargs->jobs,
+ "Number of jobs to run (Default: Number of cores).")
+ ->type_name("NUM");
+ app->add_option(
+ "-f,--log-file", clargs->log_file, "Path to local log file.")
+ ->type_name("PATH");
+ app->add_option_function<std::underlying_type_t<LogLevel>>(
+ "--log-limit",
+ [clargs](auto const& limit) {
+ clargs->log_limit = ToLogLevel(limit);
+ },
+ fmt::format("Log limit in interval [{},{}] (Default: {}).",
+ kFirstLogLevel,
+ kLastLogLevel,
+ kDefaultLogLevel))
+ ->type_name("NUM");
+}
+
+static inline auto SetupAnalysisArguments(
+ gsl::not_null<CLI::App*> const& app,
+ gsl::not_null<AnalysisArguments*> const& clargs,
+ bool with_graph = true) {
+ app->add_option(
+ "-c,--config", clargs->config_file, "Path to configuration file.")
+ ->type_name("PATH");
+ app->add_option_function<std::vector<std::string>>(
+ "target",
+ [clargs](auto const& target_raw) {
+ if (target_raw.size() > 1) {
+ clargs->target =
+ nlohmann::json{target_raw[0], target_raw[1]};
+ }
+ else {
+ clargs->target = nlohmann::json{target_raw[0]}[0];
+ }
+ },
+ "Module and target name to build.\n"
+ "Assumes current module if module name is omitted.")
+ ->expected(2);
+ app->add_option("--target_root",
+ clargs->target_root,
+ "Path of the target files' root directory.\n"
+ "Default: Same as --workspace_root")
+ ->type_name("PATH");
+ app->add_option("--rule_root",
+ clargs->rule_root,
+ "Path of the rule files' root directory.\n"
+ "Default: Same as --target_root")
+ ->type_name("PATH");
+ app->add_option("--expression_root",
+ clargs->expression_root,
+ "Path of the expression files' root directory.\n"
+ "Default: Same as --rule_root")
+ ->type_name("PATH");
+ app->add_option("--target_file_name",
+ clargs->target_file_name,
+ "Name of the targets file.");
+ app->add_option(
+ "--rule_file_name", clargs->rule_file_name, "Name of the rules file.");
+ app->add_option("--expression_file_name",
+ clargs->expression_file_name,
+ "Name of the expressions file.");
+ if (with_graph) {
+ app->add_option(
+ "--dump_graph",
+ clargs->graph_file,
+ "File path for writing the action graph description to.")
+ ->type_name("PATH");
+ app->add_option("--dump_artifacts_to_build",
+ clargs->artifacts_to_build_file,
+ "File path for writing the artifacts to build to.")
+ ->type_name("PATH");
+ }
+}
+
+static inline auto SetupDiagnosticArguments(
+ gsl::not_null<CLI::App*> const& app,
+ gsl::not_null<DiagnosticArguments*> const& clargs) {
+ app->add_option("--dump_actions",
+ clargs->dump_actions,
+ "Dump actions to file (use - for stdout).")
+ ->type_name("PATH");
+ app->add_option("--dump_trees",
+ clargs->dump_trees,
+ "Dump trees to file (use - for stdout).")
+ ->type_name("PATH");
+ app->add_option("--dump_blobs",
+ clargs->dump_blobs,
+ "Dump blobs to file (use - for stdout).")
+ ->type_name("PATH");
+ app->add_option("--dump_targets",
+ clargs->dump_targets,
+ "Dump targets to file (use - for stdout).")
+ ->type_name("PATH");
+ app->add_option("--dump_anonymous",
+ clargs->dump_anonymous,
+ "Dump anonymous targets to file (use - for stdout).")
+ ->type_name("PATH");
+ app->add_option("--dump_nodes",
+ clargs->dump_nodes,
+ "Dump nodes of target to file (use - for stdout).")
+ ->type_name("PATH");
+}
+
+static inline auto SetupEndpointArguments(
+ gsl::not_null<CLI::App*> const& app,
+ gsl::not_null<EndpointArguments*> const& clargs) {
+ app->add_option_function<std::string>(
+ "--local_build_root",
+ [clargs](auto const& build_root_raw) {
+ clargs->local_root = std::filesystem::weakly_canonical(
+ std::filesystem::absolute(build_root_raw));
+ },
+ "Root for local CAS, cache, and build directories.")
+ ->type_name("PATH");
+
+ app->add_option("-r,--remote_execution_address",
+ clargs->remote_execution_address,
+ "Address of the remote execution service.")
+ ->type_name("NAME:PORT");
+}
+
+static inline auto SetupBuildArguments(
+ gsl::not_null<CLI::App*> const& app,
+ gsl::not_null<BuildArguments*> const& clargs) {
+ app->add_flag("-p,--persistent",
+ clargs->persistent_build_dir,
+ "Do not clean build directory after execution.");
+
+ app->add_option_function<std::string>(
+ "-L,--local_launcher",
+ [clargs](auto const& launcher_raw) {
+ clargs->local_launcher =
+ nlohmann::json::parse(launcher_raw)
+ .template get<std::vector<std::string>>();
+ },
+ "JSON array with the list of strings representing the launcher to "
+ "prepend actions' commands before being executed locally.")
+ ->type_name("JSON")
+ ->default_val(nlohmann::json{"env", "--"}.dump());
+
+ app->add_option_function<std::string>(
+ "--remote_execution_property",
+ [clargs](auto const& property) {
+ std::istringstream pss(property);
+ std::string key;
+ std::string val;
+ if (not std::getline(pss, key, ':') or
+ not std::getline(pss, val, ':')) {
+ throw CLI::ConversionError{property,
+ "--remote_execution_property"};
+ }
+ clargs->platform_properties.emplace(std::move(key),
+ std::move(val));
+ },
+ "Property for remote execution as key-value pair.")
+ ->type_name("KEY:VAL")
+ ->allow_extra_args(false)
+ ->expected(1, 1);
+
+ app->add_option(
+ "-J,--build_jobs",
+ clargs->build_jobs,
+ "Number of jobs to run during build phase (Default: same as jobs).")
+ ->type_name("NUM");
+ app->add_option("--dump_artifacts",
+ clargs->dump_artifacts,
+ "Dump artifacts to file (use - for stdout).")
+ ->type_name("PATH");
+
+ app->add_flag("-s,--show_runfiles",
+ clargs->show_runfiles,
+ "Do not omit runfiles in build report.");
+
+ app->add_option("-P,--print_to_stdout",
+ clargs->print_to_stdout,
+ "After building, print the specified artifact to stdout.")
+ ->type_name("LOGICAL_PATH");
+}
+
+static inline auto SetupStageArguments(
+ gsl::not_null<CLI::App*> const& app,
+ gsl::not_null<StageArguments*> const& clargs) {
+ app->add_option_function<std::string>(
+ "-o,--output_dir",
+ [clargs](auto const& output_dir_raw) {
+ clargs->output_dir = std::filesystem::weakly_canonical(
+ std::filesystem::absolute(output_dir_raw));
+ },
+ "Path of the directory where outputs will be copied.")
+ ->type_name("PATH")
+ ->required();
+}
+
+static inline auto SetupRebuildArguments(
+ gsl::not_null<CLI::App*> const& app,
+ gsl::not_null<RebuildArguments*> const& clargs) {
+ app->add_option_function<std::string>(
+ "--vs",
+ [clargs](auto const& cache_endpoint) {
+ clargs->cache_endpoint = cache_endpoint;
+ },
+ "Cache endpoint to compare against (use \"local\" for local cache).")
+ ->type_name("NAME:PORT|\"local\"");
+
+ app->add_option(
+ "--dump_flaky", clargs->dump_flaky, "Dump flaky actions to file.")
+ ->type_name("PATH");
+}
+
+static inline auto SetupFetchArguments(
+ gsl::not_null<CLI::App*> const& app,
+ gsl::not_null<FetchArguments*> const& clargs) {
+ app->add_option(
+ "object_id",
+ clargs->object_id,
+ "Object identifier with the format '[<hash>:<size>:<type>]'.")
+ ->required();
+
+ app->add_option_function<std::string>(
+ "-o,--output_path",
+ [clargs](auto const& output_path_raw) {
+ clargs->output_path = std::filesystem::weakly_canonical(
+ std::filesystem::absolute(output_path_raw));
+ },
+ "Install path for the artifact. (omit to dump to stdout)")
+ ->type_name("PATH");
+}
+
+static inline auto SetupGraphArguments(
+ gsl::not_null<CLI::App*> const& app,
+ gsl::not_null<GraphArguments*> const& clargs) {
+ app->add_option_function<std::string>(
+ "-a,--artifacts",
+ [clargs](auto const& artifact_map_raw) {
+ clargs->artifacts = nlohmann::json::parse(artifact_map_raw);
+ },
+ "Json object with key/value pairs formed by the relative path in which "
+ "artifact is to be copied and the description of the artifact as json "
+ "object as well.");
+
+ app->add_option("-g,--graph_file",
+ clargs->graph_file,
+ "Path of the file containing the description of the "
+ "actions.")
+ ->required();
+
+ app->add_option("--git_cas",
+ clargs->git_cas,
+ "Path to a Git repository, containing blobs of potentially "
+ "missing KNOWN artifacts.");
+}
+
+#endif // INCLUDED_SRC_BUILDTOOL_COMMON_CLI_HPP
diff --git a/src/buildtool/common/identifier.hpp b/src/buildtool/common/identifier.hpp
new file mode 100644
index 00000000..1d9875fd
--- /dev/null
+++ b/src/buildtool/common/identifier.hpp
@@ -0,0 +1,25 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_COMMON_IDENTIFIER_HPP
+#define INCLUDED_SRC_BUILDTOOL_COMMON_IDENTIFIER_HPP
+
+#include <iomanip>
+#include <sstream>
+#include <string>
+
+// Global artifact identifier (not the CAS hash)
+using ArtifactIdentifier = std::string;
+
+// Global action identifier
+using ActionIdentifier = std::string;
+
+static inline auto IdentifierToString(const std::string& id) -> std::string {
+ std::ostringstream encoded{};
+ encoded << std::hex << std::setfill('0');
+ for (auto const& b : id) {
+ encoded << std::setw(2)
+ << static_cast<int>(
+ static_cast<std::make_unsigned_t<
+ std::remove_reference_t<decltype(b)>>>(b));
+ }
+ return encoded.str();
+}
+#endif // INCLUDED_SRC_BUILDTOOL_COMMON_IDENTIFIER_HPP
diff --git a/src/buildtool/common/repository_config.hpp b/src/buildtool/common/repository_config.hpp
new file mode 100644
index 00000000..62c2c4ff
--- /dev/null
+++ b/src/buildtool/common/repository_config.hpp
@@ -0,0 +1,133 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_COMMON_REPOSITORY_CONFIG_HPP
+#define INCLUDED_SRC_BUILDTOOL_COMMON_REPOSITORY_CONFIG_HPP
+
+#include <filesystem>
+#include <string>
+#include <unordered_map>
+
+#include "src/buildtool/file_system/file_root.hpp"
+#include "src/buildtool/file_system/git_cas.hpp"
+
+class RepositoryConfig {
+ public:
+ struct RepositoryInfo {
+ FileRoot workspace_root;
+ FileRoot target_root{workspace_root};
+ FileRoot rule_root{target_root};
+ FileRoot expression_root{rule_root};
+ std::unordered_map<std::string, std::string> name_mapping{};
+ std::string target_file_name{"TARGETS"};
+ std::string rule_file_name{"RULES"};
+ std::string expression_file_name{"EXPRESSIONS"};
+ };
+
+ [[nodiscard]] static auto Instance() noexcept -> RepositoryConfig& {
+ static RepositoryConfig instance{};
+ return instance;
+ }
+
+ void SetInfo(std::string const& repo, RepositoryInfo&& info) {
+ infos_.emplace(repo, std::move(info));
+ }
+
+ [[nodiscard]] auto SetGitCAS(
+ std::filesystem::path const& repo_path) noexcept {
+ git_cas_ = GitCAS::Open(repo_path);
+ return static_cast<bool>(git_cas_);
+ }
+
+ [[nodiscard]] auto Info(std::string const& repo) const noexcept
+ -> RepositoryInfo const* {
+ auto it = infos_.find(repo);
+ if (it != infos_.end()) {
+ return &it->second;
+ }
+ return nullptr;
+ }
+
+ [[nodiscard]] auto ReadBlobFromGitCAS(std::string const& hex_id)
+ const noexcept -> std::optional<std::string> {
+ return git_cas_ ? git_cas_->ReadObject(hex_id, /*is_hex_id=*/true)
+ : std::nullopt;
+ }
+
+ [[nodiscard]] auto WorkspaceRoot(std::string const& repo) const noexcept
+ -> FileRoot const* {
+ return Get<FileRoot>(
+ repo, [](auto const& info) { return &info.workspace_root; });
+ }
+
+ [[nodiscard]] auto TargetRoot(std::string const& repo) const noexcept
+ -> FileRoot const* {
+ return Get<FileRoot>(
+ repo, [](auto const& info) { return &info.target_root; });
+ }
+
+ [[nodiscard]] auto RuleRoot(std::string const& repo) const
+ -> FileRoot const* {
+ return Get<FileRoot>(repo,
+ [](auto const& info) { return &info.rule_root; });
+ }
+
+ [[nodiscard]] auto ExpressionRoot(std::string const& repo) const noexcept
+ -> FileRoot const* {
+ return Get<FileRoot>(
+ repo, [](auto const& info) { return &info.expression_root; });
+ }
+
+ [[nodiscard]] auto GlobalName(std::string const& repo,
+ std::string const& local_name) const noexcept
+ -> std::string const* {
+ return Get<std::string>(
+ repo, [&local_name](auto const& info) -> std::string const* {
+ auto it = info.name_mapping.find(local_name);
+ if (it != info.name_mapping.end()) {
+ return &it->second;
+ }
+ return nullptr;
+ });
+ }
+
+ [[nodiscard]] auto TargetFileName(std::string const& repo) const noexcept
+ -> std::string const* {
+ return Get<std::string>(
+ repo, [](auto const& info) { return &info.target_file_name; });
+ }
+
+ [[nodiscard]] auto RuleFileName(std::string const& repo) const noexcept
+ -> std::string const* {
+ return Get<std::string>(
+ repo, [](auto const& info) { return &info.rule_file_name; });
+ }
+
+ [[nodiscard]] auto ExpressionFileName(
+ std::string const& repo) const noexcept -> std::string const* {
+ return Get<std::string>(
+ repo, [](auto const& info) { return &info.expression_file_name; });
+ }
+
+ // used for testing
+ void Reset() {
+ infos_.clear();
+ git_cas_.reset();
+ }
+
+ private:
+ std::unordered_map<std::string, RepositoryInfo> infos_;
+ GitCASPtr git_cas_;
+
+ template <class T>
+ [[nodiscard]] auto Get(std::string const& repo,
+ std::function<T const*(RepositoryInfo const&)> const&
+ getter) const noexcept -> T const* {
+ if (auto const* info = Info(repo)) {
+ try { // satisfy clang-tidy's bugprone-exception-escape
+ return getter(*info);
+ } catch (...) {
+ }
+ }
+ return nullptr;
+ }
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_COMMON_REPOSITORY_CONFIG_HPP
diff --git a/src/buildtool/common/statistics.hpp b/src/buildtool/common/statistics.hpp
new file mode 100644
index 00000000..a7b89791
--- /dev/null
+++ b/src/buildtool/common/statistics.hpp
@@ -0,0 +1,61 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_COMMON_STATISTICS_HPP
+#define INCLUDED_SRC_BUILDTOOL_COMMON_STATISTICS_HPP
+
+#include <atomic>
+
+class Statistics {
+ public:
+ [[nodiscard]] static auto Instance() noexcept -> Statistics& {
+ static Statistics instance{};
+ return instance;
+ }
+
+ void Reset() noexcept {
+ num_actions_queued_ = 0;
+ num_actions_cached_ = 0;
+ num_actions_flaky_ = 0;
+ num_actions_flaky_tainted_ = 0;
+ num_rebuilt_actions_compared_ = 0;
+ num_rebuilt_actions_missing_ = 0;
+ }
+ void IncrementActionsQueuedCounter() noexcept { ++num_actions_queued_; }
+ void IncrementActionsCachedCounter() noexcept { ++num_actions_cached_; }
+ void IncrementActionsFlakyCounter() noexcept { ++num_actions_flaky_; }
+ void IncrementActionsFlakyTaintedCounter() noexcept {
+ ++num_actions_flaky_tainted_;
+ }
+ void IncrementRebuiltActionMissingCounter() noexcept {
+ ++num_rebuilt_actions_missing_;
+ }
+ void IncrementRebuiltActionComparedCounter() noexcept {
+ ++num_rebuilt_actions_compared_;
+ }
+ [[nodiscard]] auto ActionsQueuedCounter() const noexcept -> int {
+ return num_actions_queued_;
+ }
+ [[nodiscard]] auto ActionsCachedCounter() const noexcept -> int {
+ return num_actions_cached_;
+ }
+ [[nodiscard]] auto ActionsFlakyCounter() const noexcept -> int {
+ return num_actions_flaky_;
+ }
+ [[nodiscard]] auto ActionsFlakyTaintedCounter() const noexcept -> int {
+ return num_actions_flaky_tainted_;
+ }
+ [[nodiscard]] auto RebuiltActionMissingCounter() const noexcept -> int {
+ return num_rebuilt_actions_missing_;
+ }
+ [[nodiscard]] auto RebuiltActionComparedCounter() const noexcept -> int {
+ return num_rebuilt_actions_compared_;
+ }
+
+ private:
+ std::atomic<int> num_actions_queued_{};
+ std::atomic<int> num_actions_cached_{};
+ std::atomic<int> num_actions_flaky_{};
+ std::atomic<int> num_actions_flaky_tainted_{};
+ std::atomic<int> num_rebuilt_actions_missing_{};
+ std::atomic<int> num_rebuilt_actions_compared_{};
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_COMMON_STATISTICS_HPP
diff --git a/src/buildtool/common/tree.hpp b/src/buildtool/common/tree.hpp
new file mode 100644
index 00000000..512eda86
--- /dev/null
+++ b/src/buildtool/common/tree.hpp
@@ -0,0 +1,72 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_COMMON_TREE_HPP
+#define INCLUDED_SRC_BUILDTOOL_COMMON_TREE_HPP
+
+#include <string>
+#include <unordered_map>
+
+#include "nlohmann/json.hpp"
+#include "src/buildtool/common/action_description.hpp"
+#include "src/buildtool/common/artifact_description.hpp"
+
+// Describes tree, its inputs, output (tree artifact), and action (tree action).
+class Tree {
+ using inputs_t = ActionDescription::inputs_t;
+
+ public:
+ explicit Tree(inputs_t&& inputs)
+ : id_{ComputeId(inputs)}, inputs_{std::move(inputs)} {}
+
+ [[nodiscard]] auto Id() const& -> std::string const& { return id_; }
+ [[nodiscard]] auto Id() && -> std::string { return std::move(id_); }
+
+ [[nodiscard]] auto ToJson() const -> nlohmann::json {
+ return ComputeDescription(inputs_);
+ }
+
+ [[nodiscard]] auto Inputs() const -> inputs_t { return inputs_; }
+
+ [[nodiscard]] auto Action() const -> ActionDescription {
+ return {
+ {/*unused*/}, {/*unused*/}, Action::CreateTreeAction(id_), inputs_};
+ }
+
+ [[nodiscard]] auto Output() const -> ArtifactDescription {
+ return ArtifactDescription{id_};
+ }
+
+ [[nodiscard]] static auto FromJson(std::string const& id,
+ nlohmann::json const& json)
+ -> std::optional<Tree> {
+ auto inputs = inputs_t{};
+ inputs.reserve(json.size());
+ for (auto const& [path, artifact] : json.items()) {
+ auto artifact_desc = ArtifactDescription::FromJson(artifact);
+ if (not artifact_desc) {
+ return std::nullopt;
+ }
+ inputs.emplace(path, std::move(*artifact_desc));
+ }
+ return Tree{id, std::move(inputs)};
+ }
+
+ private:
+ std::string id_;
+ inputs_t inputs_;
+
+ Tree(std::string id, inputs_t&& inputs)
+ : id_{std::move(id)}, inputs_{std::move(inputs)} {}
+
+ static auto ComputeDescription(inputs_t const& inputs) -> nlohmann::json {
+ auto json = nlohmann::json::object();
+ for (auto const& [path, artifact] : inputs) {
+ json[path] = artifact.ToJson();
+ }
+ return json;
+ }
+
+ static auto ComputeId(inputs_t const& inputs) -> std::string {
+ return ComputeHash(ComputeDescription(inputs).dump());
+ }
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_COMMON_TREE_HPP
diff --git a/src/buildtool/crypto/TARGETS b/src/buildtool/crypto/TARGETS
new file mode 100644
index 00000000..d68758ed
--- /dev/null
+++ b/src/buildtool/crypto/TARGETS
@@ -0,0 +1,31 @@
+{ "hash_impl":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["hash_impl"]
+ , "hdrs":
+ [ "hash_impl.hpp"
+ , "hash_impl_md5.hpp"
+ , "hash_impl_sha1.hpp"
+ , "hash_impl_sha256.hpp"
+ , "hash_impl_git.hpp"
+ ]
+ , "srcs":
+ [ "hash_impl_md5.cpp"
+ , "hash_impl_sha1.cpp"
+ , "hash_impl_sha256.cpp"
+ , "hash_impl_git.cpp"
+ ]
+ , "deps":
+ [ ["src/buildtool/logging", "logging"]
+ , ["src/utils/cpp", "hex_string"]
+ , ["@", "ssl", "", "crypto"]
+ ]
+ , "stage": ["src", "buildtool", "crypto"]
+ }
+, "hash_generator":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["hash_generator"]
+ , "hdrs": ["hash_generator.hpp"]
+ , "deps": ["hash_impl"]
+ , "stage": ["src", "buildtool", "crypto"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/crypto/hash_generator.hpp b/src/buildtool/crypto/hash_generator.hpp
new file mode 100644
index 00000000..453a9c06
--- /dev/null
+++ b/src/buildtool/crypto/hash_generator.hpp
@@ -0,0 +1,130 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_GENERATOR_HPP
+#define INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_GENERATOR_HPP
+
+#include <iomanip>
+#include <memory>
+#include <optional>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include "src/buildtool/crypto/hash_impl_git.hpp"
+#include "src/buildtool/crypto/hash_impl_md5.hpp"
+#include "src/buildtool/crypto/hash_impl_sha1.hpp"
+#include "src/buildtool/crypto/hash_impl_sha256.hpp"
+#include "src/utils/cpp/hex_string.hpp"
+
+/// \brief Hash generator, supports multiple types \ref HashType.
+class HashGenerator {
+ public:
+ /// \brief Types of hash implementations supported by generator.
+ enum class HashType { MD5, SHA1, SHA256, GIT };
+
+ /// \brief The universal hash digest.
+ /// The type of hash and the digest length depends on the hash
+ /// implementation used to generated this digest.
+ class HashDigest {
+ friend HashGenerator;
+
+ public:
+ /// \brief Get pointer to raw bytes of digest.
+ /// Length can be obtained using \ref Length.
+ [[nodiscard]] auto Bytes() const -> std::string const& {
+ return bytes_;
+ }
+
+ /// \brief Get hexadecimal string of digest.
+ /// Length is twice the length of raw bytes (\ref Length).
+ [[nodiscard]] auto HexString() const -> std::string {
+ return ToHexString(bytes_);
+ }
+
+ /// \brief Get digest length in raw bytes.
+ [[nodiscard]] auto Length() const -> std::size_t {
+ return bytes_.size();
+ }
+
+ private:
+ std::string bytes_{};
+
+ explicit HashDigest(std::string bytes) : bytes_{std::move(bytes)} {}
+ };
+
+ /// \brief Incremental hasher.
+ class Hasher {
+ friend HashGenerator;
+
+ public:
+ /// \brief Feed data to the hasher.
+ auto Update(std::string const& data) noexcept -> bool {
+ return impl_->Update(data);
+ }
+
+ /// \brief Finalize hash.
+ [[nodiscard]] auto Finalize() && noexcept -> std::optional<HashDigest> {
+ auto hash = std::move(*impl_).Finalize();
+ if (hash) {
+ return HashDigest{*hash};
+ }
+ return std::nullopt;
+ }
+
+ private:
+ std::unique_ptr<IHashImpl> impl_;
+
+ explicit Hasher(std::unique_ptr<IHashImpl> impl)
+ : impl_{std::move(impl)} {}
+ };
+
+ /// \brief Create hash generator for specific type.
+ explicit HashGenerator(HashType type)
+ : type_{type}, digest_length_{create_impl()->DigestLength()} {}
+ HashGenerator(HashGenerator const&) = delete;
+ HashGenerator(HashGenerator&&) = delete;
+ auto operator=(HashGenerator const&) -> HashGenerator& = delete;
+ auto operator=(HashGenerator &&) -> HashGenerator& = delete;
+ ~HashGenerator() noexcept = default;
+
+ /// \brief Run hash function on data.
+ [[nodiscard]] auto Run(std::string const& data) const noexcept
+ -> HashDigest {
+ auto impl = create_impl();
+ return HashDigest{std::move(*impl).Compute(data)};
+ }
+
+ [[nodiscard]] auto IncrementalHasher() const noexcept -> Hasher {
+ return Hasher(create_impl());
+ }
+
+ [[nodiscard]] auto DigestLength() const noexcept -> std::size_t {
+ return digest_length_;
+ }
+
+ private:
+ HashType type_{};
+ std::size_t digest_length_{};
+
+ /// \brief Dispatch for creating the actual implementation
+ [[nodiscard]] auto create_impl() const noexcept
+ -> std::unique_ptr<IHashImpl> {
+ switch (type_) {
+ case HashType::MD5:
+ return CreateHashImplMd5();
+ case HashType::SHA1:
+ return CreateHashImplSha1();
+ case HashType::SHA256:
+ return CreateHashImplSha256();
+ case HashType::GIT:
+ return CreateHashImplGit();
+ }
+ }
+};
+
+/// \brief Hash function used for the entire buildtool
+[[maybe_unused]] [[nodiscard]] static inline auto ComputeHash(
+ std::string const& data) noexcept -> std::string {
+ static HashGenerator gen{HashGenerator::HashType::GIT};
+ return gen.Run(data).HexString();
+}
+
+#endif // INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_GENERATOR_HPP
diff --git a/src/buildtool/crypto/hash_impl.hpp b/src/buildtool/crypto/hash_impl.hpp
new file mode 100644
index 00000000..aa5a9559
--- /dev/null
+++ b/src/buildtool/crypto/hash_impl.hpp
@@ -0,0 +1,40 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_IMPL_HPP
+#define INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_IMPL_HPP
+
+#include <optional>
+#include <string>
+
+#include "src/buildtool/logging/logger.hpp"
+
+/// \brief Interface for hash implementations
+class IHashImpl {
+ public:
+ IHashImpl() noexcept = default;
+ IHashImpl(IHashImpl const&) = default;
+ IHashImpl(IHashImpl&&) = default;
+ auto operator=(IHashImpl const&) -> IHashImpl& = default;
+ auto operator=(IHashImpl &&) -> IHashImpl& = default;
+ virtual ~IHashImpl() = default;
+
+ /// \brief Feed data to the incremental hashing.
+ [[nodiscard]] virtual auto Update(std::string const& data) noexcept
+ -> bool = 0;
+
+ /// \brief Finalize the hashing and return hash as string of raw bytes.
+ [[nodiscard]] virtual auto Finalize() && noexcept
+ -> std::optional<std::string> = 0;
+
+ /// \brief Compute the hash of data and return it as string of raw bytes.
+ [[nodiscard]] virtual auto Compute(std::string const& data) && noexcept
+ -> std::string = 0;
+
+ /// \brief Get length of the hash in raw bytes.
+ [[nodiscard]] virtual auto DigestLength() const noexcept -> std::size_t = 0;
+
+ static auto FatalError() noexcept -> void {
+ Logger::Log(LogLevel::Error, "Failed to compute hash.");
+ std::terminate();
+ }
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_IMPL_HPP
diff --git a/src/buildtool/crypto/hash_impl_git.cpp b/src/buildtool/crypto/hash_impl_git.cpp
new file mode 100644
index 00000000..9cb2a761
--- /dev/null
+++ b/src/buildtool/crypto/hash_impl_git.cpp
@@ -0,0 +1,42 @@
+#include <array>
+#include <cstdint>
+
+#include "openssl/sha.h"
+#include "src/buildtool/crypto/hash_impl.hpp"
+
+/// \brief Hash implementation for Git blob ids.
+/// Does not support incremental hashing.
+class HashImplGit final : public IHashImpl {
+ public:
+ auto Update(std::string const& /*data*/) noexcept -> bool final {
+ return false;
+ }
+
+ auto Finalize() && noexcept -> std::optional<std::string> final {
+ return std::nullopt;
+ }
+
+ auto Compute(std::string const& data) && noexcept -> std::string final {
+ SHA_CTX ctx;
+ std::string const header{"blob " + std::to_string(data.size()) + '\0'};
+ if (SHA1_Init(&ctx) == 1 &&
+ SHA1_Update(&ctx, header.data(), header.size()) == 1 &&
+ SHA1_Update(&ctx, data.data(), data.size()) == 1) {
+ auto out = std::array<std::uint8_t, SHA_DIGEST_LENGTH>{};
+ if (SHA1_Final(out.data(), &ctx) == 1) {
+ return std::string{out.begin(), out.end()};
+ }
+ }
+ FatalError();
+ return {};
+ }
+
+ [[nodiscard]] auto DigestLength() const noexcept -> std::size_t final {
+ return SHA_DIGEST_LENGTH;
+ }
+};
+
+/// \brief Factory for Git implementation
+auto CreateHashImplGit() -> std::unique_ptr<IHashImpl> {
+ return std::make_unique<HashImplGit>();
+}
diff --git a/src/buildtool/crypto/hash_impl_git.hpp b/src/buildtool/crypto/hash_impl_git.hpp
new file mode 100644
index 00000000..be0738da
--- /dev/null
+++ b/src/buildtool/crypto/hash_impl_git.hpp
@@ -0,0 +1,10 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_IMPL_GIT_HPP
+#define INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_IMPL_GIT_HPP
+
+#include <memory>
+
+#include "src/buildtool/crypto/hash_impl.hpp"
+
+[[nodiscard]] extern auto CreateHashImplGit() -> std::unique_ptr<IHashImpl>;
+
+#endif // INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_IMPL_GIT_HPP
diff --git a/src/buildtool/crypto/hash_impl_md5.cpp b/src/buildtool/crypto/hash_impl_md5.cpp
new file mode 100644
index 00000000..106dc984
--- /dev/null
+++ b/src/buildtool/crypto/hash_impl_md5.cpp
@@ -0,0 +1,50 @@
+#include <array>
+#include <cstdint>
+
+#include "openssl/md5.h"
+#include "src/buildtool/crypto/hash_impl.hpp"
+
+/// \brief Hash implementation for MD5
+class HashImplMd5 final : public IHashImpl {
+ public:
+ HashImplMd5() { initialized_ = MD5_Init(&ctx_) == 1; }
+
+ auto Update(std::string const& data) noexcept -> bool final {
+ return initialized_ and
+ MD5_Update(&ctx_, data.data(), data.size()) == 1;
+ }
+
+ auto Finalize() && noexcept -> std::optional<std::string> final {
+ if (initialized_) {
+ auto out = std::array<std::uint8_t, MD5_DIGEST_LENGTH>{};
+ if (MD5_Final(out.data(), &ctx_) == 1) {
+ return std::string{out.begin(), out.end()};
+ }
+ }
+ return std::nullopt;
+ }
+
+ auto Compute(std::string const& data) && noexcept -> std::string final {
+ if (Update(data)) {
+ auto digest = std::move(*this).Finalize();
+ if (digest) {
+ return *digest;
+ }
+ }
+ FatalError();
+ return {};
+ }
+
+ [[nodiscard]] auto DigestLength() const noexcept -> std::size_t final {
+ return MD5_DIGEST_LENGTH;
+ }
+
+ private:
+ MD5_CTX ctx_{};
+ bool initialized_{};
+};
+
+/// \brief Factory for MD5 implementation
+auto CreateHashImplMd5() -> std::unique_ptr<IHashImpl> {
+ return std::make_unique<HashImplMd5>();
+}
diff --git a/src/buildtool/crypto/hash_impl_md5.hpp b/src/buildtool/crypto/hash_impl_md5.hpp
new file mode 100644
index 00000000..95411570
--- /dev/null
+++ b/src/buildtool/crypto/hash_impl_md5.hpp
@@ -0,0 +1,10 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_IMPL_MD5_HPP
+#define INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_IMPL_MD5_HPP
+
+#include <memory>
+
+#include "src/buildtool/crypto/hash_impl.hpp"
+
+[[nodiscard]] extern auto CreateHashImplMd5() -> std::unique_ptr<IHashImpl>;
+
+#endif // INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_IMPL_MD5_HPP
diff --git a/src/buildtool/crypto/hash_impl_sha1.cpp b/src/buildtool/crypto/hash_impl_sha1.cpp
new file mode 100644
index 00000000..e0bee0fb
--- /dev/null
+++ b/src/buildtool/crypto/hash_impl_sha1.cpp
@@ -0,0 +1,50 @@
+#include <array>
+#include <cstdint>
+
+#include "openssl/sha.h"
+#include "src/buildtool/crypto/hash_impl.hpp"
+
+/// \brief Hash implementation for SHA-1
+class HashImplSha1 final : public IHashImpl {
+ public:
+ HashImplSha1() { initialized_ = SHA1_Init(&ctx_) == 1; }
+
+ auto Update(std::string const& data) noexcept -> bool final {
+ return initialized_ and
+ SHA1_Update(&ctx_, data.data(), data.size()) == 1;
+ }
+
+ auto Finalize() && noexcept -> std::optional<std::string> final {
+ if (initialized_) {
+ auto out = std::array<std::uint8_t, SHA_DIGEST_LENGTH>{};
+ if (SHA1_Final(out.data(), &ctx_) == 1) {
+ return std::string{out.begin(), out.end()};
+ }
+ }
+ return std::nullopt;
+ }
+
+ auto Compute(std::string const& data) && noexcept -> std::string final {
+ if (Update(data)) {
+ auto digest = std::move(*this).Finalize();
+ if (digest) {
+ return *digest;
+ }
+ }
+ FatalError();
+ return {};
+ }
+
+ [[nodiscard]] auto DigestLength() const noexcept -> std::size_t final {
+ return SHA_DIGEST_LENGTH;
+ }
+
+ private:
+ SHA_CTX ctx_{};
+ bool initialized_{};
+};
+
+/// \brief Factory for SHA-1 implementation
+auto CreateHashImplSha1() -> std::unique_ptr<IHashImpl> {
+ return std::make_unique<HashImplSha1>();
+}
diff --git a/src/buildtool/crypto/hash_impl_sha1.hpp b/src/buildtool/crypto/hash_impl_sha1.hpp
new file mode 100644
index 00000000..7b8196b5
--- /dev/null
+++ b/src/buildtool/crypto/hash_impl_sha1.hpp
@@ -0,0 +1,10 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_IMPL_SHA1_HPP
+#define INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_IMPL_SHA1_HPP
+
+#include <memory>
+
+#include "src/buildtool/crypto/hash_impl.hpp"
+
+[[nodiscard]] extern auto CreateHashImplSha1() -> std::unique_ptr<IHashImpl>;
+
+#endif // INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_IMPL_SHA1_HPP
diff --git a/src/buildtool/crypto/hash_impl_sha256.cpp b/src/buildtool/crypto/hash_impl_sha256.cpp
new file mode 100644
index 00000000..bbea10d3
--- /dev/null
+++ b/src/buildtool/crypto/hash_impl_sha256.cpp
@@ -0,0 +1,50 @@
+#include <array>
+#include <cstdint>
+
+#include "openssl/sha.h"
+#include "src/buildtool/crypto/hash_impl.hpp"
+
+/// \brief Hash implementation for SHA-256
+class HashImplSha256 final : public IHashImpl {
+ public:
+ HashImplSha256() { initialized_ = SHA256_Init(&ctx_) == 1; }
+
+ auto Update(std::string const& data) noexcept -> bool final {
+ return initialized_ and
+ SHA256_Update(&ctx_, data.data(), data.size()) == 1;
+ }
+
+ auto Finalize() && noexcept -> std::optional<std::string> final {
+ if (initialized_) {
+ auto out = std::array<std::uint8_t, SHA256_DIGEST_LENGTH>{};
+ if (SHA256_Final(out.data(), &ctx_) == 1) {
+ return std::string{out.begin(), out.end()};
+ }
+ }
+ return std::nullopt;
+ }
+
+ auto Compute(std::string const& data) && noexcept -> std::string final {
+ if (Update(data)) {
+ auto digest = std::move(*this).Finalize();
+ if (digest) {
+ return *digest;
+ }
+ }
+ FatalError();
+ return {};
+ }
+
+ [[nodiscard]] auto DigestLength() const noexcept -> std::size_t final {
+ return SHA256_DIGEST_LENGTH;
+ }
+
+ private:
+ SHA256_CTX ctx_{};
+ bool initialized_{};
+};
+
+/// \brief Factory for SHA-256 implementation
+auto CreateHashImplSha256() -> std::unique_ptr<IHashImpl> {
+ return std::make_unique<HashImplSha256>();
+}
diff --git a/src/buildtool/crypto/hash_impl_sha256.hpp b/src/buildtool/crypto/hash_impl_sha256.hpp
new file mode 100644
index 00000000..d74c1492
--- /dev/null
+++ b/src/buildtool/crypto/hash_impl_sha256.hpp
@@ -0,0 +1,10 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_IMPL_SHA256_HPP
+#define INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_IMPL_SHA256_HPP
+
+#include <memory>
+
+#include "src/buildtool/crypto/hash_impl.hpp"
+
+[[nodiscard]] extern auto CreateHashImplSha256() -> std::unique_ptr<IHashImpl>;
+
+#endif // INCLUDED_SRC_BUILDTOOL_CRYPTO_HASH_IMPL_SHA256_HPP
diff --git a/src/buildtool/execution_api/TARGETS b/src/buildtool/execution_api/TARGETS
new file mode 100644
index 00000000..9e26dfee
--- /dev/null
+++ b/src/buildtool/execution_api/TARGETS
@@ -0,0 +1 @@
+{} \ No newline at end of file
diff --git a/src/buildtool/execution_api/bazel_msg/TARGETS b/src/buildtool/execution_api/bazel_msg/TARGETS
new file mode 100644
index 00000000..19fe1277
--- /dev/null
+++ b/src/buildtool/execution_api/bazel_msg/TARGETS
@@ -0,0 +1,31 @@
+{ "bazel_msg":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["bazel_msg"]
+ , "hdrs": ["bazel_blob.hpp", "bazel_blob_container.hpp", "bazel_common.hpp"]
+ , "deps":
+ [ ["src/buildtool/crypto", "hash_generator"]
+ , ["src/buildtool/file_system", "file_system_manager"]
+ , ["src/utils/cpp", "concepts"]
+ , ["src/utils/cpp", "type_safe_arithmetic"]
+ , ["@", "grpc", "", "grpc++"]
+ ]
+ , "proto": [["@", "bazel_remote_apis", "", "remote_execution_proto"]]
+ , "stage": ["src", "buildtool", "execution_api", "bazel_msg"]
+ }
+, "bazel_msg_factory":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["bazel_msg_factory"]
+ , "hdrs": ["bazel_msg_factory.hpp"]
+ , "srcs": ["bazel_msg_factory.cpp"]
+ , "deps":
+ [ "bazel_msg"
+ , ["src/buildtool/common", "common"]
+ , ["src/buildtool/file_system", "file_system_manager"]
+ , ["src/buildtool/execution_engine/dag", "dag"]
+ , ["@", "grpc", "", "grpc++"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ ]
+ , "proto": [["@", "bazel_remote_apis", "", "remote_execution_proto"]]
+ , "stage": ["src", "buildtool", "execution_api", "bazel_msg"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/execution_api/bazel_msg/bazel_blob.hpp b/src/buildtool/execution_api/bazel_msg/bazel_blob.hpp
new file mode 100644
index 00000000..115c4018
--- /dev/null
+++ b/src/buildtool/execution_api/bazel_msg/bazel_blob.hpp
@@ -0,0 +1,31 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_BAZEL_MSG_BAZEL_BLOB_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_BAZEL_MSG_BAZEL_BLOB_HPP
+
+#include <filesystem>
+#include <memory>
+#include <optional>
+#include <string>
+
+#include "src/buildtool/common/artifact_digest.hpp"
+#include "src/buildtool/common/bazel_types.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+
+struct BazelBlob {
+ BazelBlob(bazel_re::Digest mydigest, std::string mydata)
+ : digest{std::move(mydigest)}, data{std::move(mydata)} {}
+
+ bazel_re::Digest digest{};
+ std::string data{};
+};
+
+[[nodiscard]] static inline auto CreateBlobFromFile(
+ std::filesystem::path const& file_path) noexcept
+ -> std::optional<BazelBlob> {
+ auto const content = FileSystemManager::ReadFile(file_path);
+ if (not content.has_value()) {
+ return std::nullopt;
+ }
+ return BazelBlob{ArtifactDigest::Create(*content), *content};
+}
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_BAZEL_MSG_BAZEL_BLOB_HPP
diff --git a/src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp b/src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp
new file mode 100644
index 00000000..7005f129
--- /dev/null
+++ b/src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp
@@ -0,0 +1,264 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_BAZEL_MSG_BAZEL_BLOB_CONTAINER_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_BAZEL_MSG_BAZEL_BLOB_CONTAINER_HPP
+
+#include <string>
+#include <type_traits>
+#include <unordered_map>
+#include <utility>
+#include <vector>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_blob.hpp"
+#include "src/utils/cpp/concepts.hpp"
+
+namespace detail {
+
+// Interface for transforming iteratee for wrapped_iterators
+template <class T_Iteratee, class T_Iterator>
+struct wrapped_iterator_transform {
+ [[nodiscard]] virtual auto operator()(T_Iterator const&) const& noexcept
+ -> T_Iteratee const& = 0;
+ [[nodiscard]] auto operator()(T_Iterator const&) && noexcept = delete;
+};
+
+// Wrap iterator from read-only container with custom transform. This class
+// represents a read-only iterable view with an implicit transform operation.
+template <class T_Iteratee,
+ class T_Iterator,
+ derived_from<wrapped_iterator_transform<T_Iteratee, T_Iterator>>
+ T_Transform>
+class wrapped_iterator {
+ public:
+ wrapped_iterator(T_Iterator it, T_Transform&& transform) noexcept
+ : it_{std::move(it)}, transform_{std::move(transform)} {}
+ wrapped_iterator(wrapped_iterator const& other) noexcept = default;
+ wrapped_iterator(wrapped_iterator&& other) noexcept = default;
+ ~wrapped_iterator() noexcept = default;
+
+ auto operator=(wrapped_iterator const& other) noexcept
+ -> wrapped_iterator& = default;
+ auto operator=(wrapped_iterator&& other) noexcept
+ -> wrapped_iterator& = default;
+
+ auto operator++() noexcept -> wrapped_iterator& {
+ ++it_;
+ return *this;
+ }
+
+ auto operator++(int) noexcept -> wrapped_iterator {
+ wrapped_iterator r = *this;
+ ++(*this);
+ return r;
+ }
+
+ [[nodiscard]] auto operator==(wrapped_iterator other) const noexcept
+ -> bool {
+ return it_ == other.it_;
+ }
+ [[nodiscard]] auto operator!=(wrapped_iterator other) const noexcept
+ -> bool {
+ return not(*this == other);
+ }
+ [[nodiscard]] auto operator*() const noexcept -> T_Iteratee const& {
+ return transform_(it_);
+ }
+ using difference_type = typename T_Iterator::difference_type;
+ using value_type = T_Iteratee;
+ using pointer = T_Iteratee const*;
+ using reference = T_Iteratee const&;
+ using iterator_category = std::forward_iterator_tag;
+
+ private:
+ T_Iterator it_;
+ T_Transform transform_;
+};
+
+} // namespace detail
+
+/// \brief Container for Blobs
+/// Can be used to iterate over digests or subset of blobs with certain digest.
+class BlobContainer {
+ using underlaying_map_t = std::unordered_map<bazel_re::Digest, BazelBlob>;
+ using item_iterator = underlaying_map_t::const_iterator;
+
+ // transform underlaying_map_t::value_type to BazelBlob
+ struct item_to_blob
+ : public detail::wrapped_iterator_transform<BazelBlob, item_iterator> {
+ public:
+ auto operator()(item_iterator const& it) const& noexcept
+ -> BazelBlob const& final {
+ return it->second;
+ }
+ };
+
+ public:
+ class iterator : public detail::wrapped_iterator<BazelBlob,
+ item_iterator,
+ item_to_blob> {
+ friend class BlobContainer;
+ explicit iterator(item_iterator const& it) noexcept
+ : wrapped_iterator{it, item_to_blob{}} {}
+ };
+
+ /// \brief Iterable read-only list for Digests
+ class DigestList {
+ friend class BlobContainer;
+
+ // transform underlaying_map_t::value_type to Digest
+ struct item_to_digest
+ : public detail::wrapped_iterator_transform<bazel_re::Digest,
+ item_iterator> {
+ public:
+ auto operator()(item_iterator const& it) const& noexcept
+ -> bazel_re::Digest const& final {
+ return it->first;
+ }
+ };
+
+ public:
+ /// \brief Read-only iterator for DigestList
+ class iterator : public detail::wrapped_iterator<bazel_re::Digest,
+ item_iterator,
+ item_to_digest> {
+ public:
+ explicit iterator(item_iterator const& it) noexcept
+ : wrapped_iterator{it, item_to_digest{}} {}
+ };
+
+ /// \brief Obtain start iterator for DigestList
+ [[nodiscard]] auto begin() const noexcept -> iterator {
+ return iterator(blobs_->cbegin());
+ }
+
+ /// \brief Obtain end iterator for DigestList
+ [[nodiscard]] auto end() const noexcept -> iterator {
+ return iterator(blobs_->cend());
+ }
+
+ private:
+ gsl::not_null<underlaying_map_t const*> blobs_;
+
+ explicit DigestList(underlaying_map_t const& blobs) noexcept
+ : blobs_{&blobs} {}
+ };
+
+ /// \brief Iterable read-only list for Blobs related to given Digests
+ class RelatedBlobList {
+ friend class BlobContainer;
+ using digest_iterator = std::vector<bazel_re::Digest>::const_iterator;
+
+ // transform Digest to BazelBlob
+ struct digest_to_blob
+ : public detail::wrapped_iterator_transform<BazelBlob,
+ digest_iterator> {
+ public:
+ explicit digest_to_blob(
+ gsl::not_null<underlaying_map_t const*> blobs) noexcept
+ : blobs_{std::move(blobs)} {}
+ digest_to_blob(digest_to_blob const& other) noexcept = default;
+ digest_to_blob(digest_to_blob&& other) noexcept = default;
+ ~digest_to_blob() noexcept = default;
+
+ auto operator=(digest_to_blob const& other) noexcept
+ -> digest_to_blob& = default;
+ auto operator=(digest_to_blob&& other) noexcept
+ -> digest_to_blob& = default;
+
+ auto operator()(digest_iterator const& it) const& noexcept
+ -> BazelBlob const& final {
+ try {
+ return blobs_->at(*it);
+ } catch (std::exception const&) {
+ return kEmpty;
+ }
+ }
+
+ private:
+ static inline BazelBlob kEmpty{bazel_re::Digest{}, std::string{}};
+ gsl::not_null<underlaying_map_t const*> blobs_;
+ };
+
+ public:
+ /// \brief Read-only iterator for RelatedBlobList
+ class iterator : public detail::wrapped_iterator<BazelBlob,
+ digest_iterator,
+ digest_to_blob> {
+ public:
+ iterator(
+ digest_iterator const& it,
+ gsl::not_null<underlaying_map_t const*> const& blobs) noexcept
+ : wrapped_iterator{it, digest_to_blob{blobs}} {}
+ };
+
+ /// \brief Obtain start iterator for RelatedBlobList
+ [[nodiscard]] auto begin() const noexcept -> iterator {
+ return iterator(digests_.cbegin(), blobs_);
+ }
+
+ /// \brief Obtain end iterator for RelatedBlobList
+ [[nodiscard]] auto end() const noexcept -> iterator {
+ return iterator(digests_.cend(), blobs_);
+ }
+
+ private:
+ std::vector<bazel_re::Digest> digests_;
+ gsl::not_null<underlaying_map_t const*> blobs_;
+
+ RelatedBlobList(underlaying_map_t const& blobs,
+ std::vector<bazel_re::Digest> digests) noexcept
+ : digests_{std::move(digests)}, blobs_{&blobs} {}
+ };
+
+ BlobContainer() noexcept = default;
+ explicit BlobContainer(std::vector<BazelBlob> blobs) {
+ blobs_.reserve(blobs.size());
+ for (auto& blob : blobs) {
+ blobs_.emplace(blob.digest, std::move(blob));
+ }
+ }
+
+ /// \brief Emplace new BazelBlob to container.
+ void Emplace(BazelBlob&& blob) {
+ blobs_.emplace(blob.digest, std::move(blob));
+ }
+
+ /// \brief Clear all BazelBlobs from container.
+ void Clear() noexcept { return blobs_.clear(); }
+
+ /// \brief Number of BazelBlobs in container.
+ [[nodiscard]] auto Size() const noexcept -> std::size_t {
+ return blobs_.size();
+ }
+
+ /// \brief Is equivalent BazelBlob (with same Digest) in container.
+ /// \param[in] blob BazelBlob to search equivalent BazelBlob for
+ [[nodiscard]] auto Contains(BazelBlob const& blob) const noexcept -> bool {
+ return blobs_.contains(blob.digest);
+ }
+
+ /// \brief Obtain iterable list of Digests from container.
+ [[nodiscard]] auto Digests() const noexcept -> DigestList {
+ return DigestList{blobs_};
+ }
+
+ /// \brief Obtain iterable list of BazelBlobs related to Digests.
+ /// \param[in] related Related Digests
+ [[nodiscard]] auto RelatedBlobs(
+ std::vector<bazel_re::Digest> const& related) const noexcept
+ -> RelatedBlobList {
+ return RelatedBlobList{blobs_, related};
+ };
+
+ [[nodiscard]] auto begin() const noexcept -> iterator {
+ return iterator{blobs_.begin()};
+ }
+
+ [[nodiscard]] auto end() const noexcept -> iterator {
+ return iterator{blobs_.end()};
+ }
+
+ private:
+ underlaying_map_t blobs_{};
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_BAZEL_MSG_BAZEL_BLOB_CONTAINER_HPP
diff --git a/src/buildtool/execution_api/bazel_msg/bazel_common.hpp b/src/buildtool/execution_api/bazel_msg/bazel_common.hpp
new file mode 100644
index 00000000..cc76541c
--- /dev/null
+++ b/src/buildtool/execution_api/bazel_msg/bazel_common.hpp
@@ -0,0 +1,21 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_BAZEL_MSG_BAZEL_COMMON_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_BAZEL_MSG_BAZEL_COMMON_HPP
+
+/// \file bazel_common.hpp
+/// \brief Common types and functions required by Bazel API.
+
+#include <cstdint>
+
+#include "src/utils/cpp/type_safe_arithmetic.hpp"
+
+// Port
+struct PortTag : type_safe_arithmetic_tag<std::uint16_t> {};
+using Port = type_safe_arithmetic<PortTag>;
+
+struct ExecutionConfiguration {
+ int execution_priority{};
+ int results_cache_priority{};
+ bool skip_cache_lookup{};
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_BAZEL_MSG_BAZEL_COMMON_HPP
diff --git a/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.cpp b/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.cpp
new file mode 100644
index 00000000..8fad8cae
--- /dev/null
+++ b/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.cpp
@@ -0,0 +1,590 @@
+#include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp"
+
+#include <algorithm>
+#include <exception>
+#include <filesystem>
+#include <functional>
+#include <memory>
+#include <sstream>
+#include <string>
+#include <variant>
+#include <vector>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/common/bazel_types.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+
+namespace {
+
+/// \brief Abstract interface for bundle (message, content, and digest).
+/// Provides getters for content, corresponding digest, and creating a blob.
+class IBundle {
+ public:
+ using Ptr = std::unique_ptr<IBundle>;
+ using ContentCreateFunc = std::function<std::optional<std::string>()>;
+ using DigestCreateFunc =
+ std::function<bazel_re::Digest(std::string const&)>;
+
+ IBundle() = default;
+ IBundle(IBundle const&) = delete;
+ IBundle(IBundle&&) = delete;
+ auto operator=(IBundle const&) -> IBundle& = delete;
+ auto operator=(IBundle &&) -> IBundle& = delete;
+ virtual ~IBundle() noexcept = default;
+
+ [[nodiscard]] virtual auto Content() const& noexcept
+ -> std::string const& = 0;
+ [[nodiscard]] virtual auto Digest() const& noexcept
+ -> bazel_re::Digest const& = 0;
+ [[nodiscard]] auto MakeBlob() const noexcept -> BazelBlob {
+ return BazelBlob{Digest(), Content()};
+ }
+};
+
+/// \brief Sparse Bundle implementation for protobuf messages.
+/// It is called "Sparse" as it does not contain its own Digest. Instead, the
+/// protobuf message's Digest is used.
+/// \tparam T The actual protobuf message type.
+template <typename T>
+class SparseBundle final : public IBundle {
+ public:
+ using Ptr = std::unique_ptr<SparseBundle<T>>;
+
+ [[nodiscard]] auto Message() const noexcept -> T const& { return msg_; }
+
+ [[nodiscard]] auto Content() const& noexcept -> std::string const& final {
+ return content_;
+ }
+
+ [[nodiscard]] auto Digest() const& noexcept
+ -> bazel_re::Digest const& final {
+ return msg_.digest();
+ }
+
+ [[nodiscard]] static auto Create(T const& msg,
+ ContentCreateFunc const& content_creator,
+ DigestCreateFunc const& digest_creator)
+ -> Ptr {
+ auto content = content_creator();
+ if (content) {
+ // create bundle with message and content
+ Ptr bundle{new SparseBundle<T>{msg, std::move(*content)}};
+
+ // create digest
+ bundle->msg_.set_allocated_digest(gsl::owner<bazel_re::Digest*>{
+ new bazel_re::Digest{digest_creator(bundle->content_)}});
+ return bundle;
+ }
+ return Ptr{};
+ }
+
+ SparseBundle(SparseBundle const&) = delete;
+ SparseBundle(SparseBundle&&) = delete;
+ auto operator=(SparseBundle const&) -> SparseBundle& = delete;
+ auto operator=(SparseBundle &&) -> SparseBundle& = delete;
+ ~SparseBundle() noexcept final = default;
+
+ private:
+ T msg_{}; /**< Protobuf message */
+ std::string content_{}; /**< Content the message's digest refers to */
+
+ explicit SparseBundle(T msg, std::string&& content)
+ : msg_{std::move(msg)}, content_{std::move(content)} {}
+};
+
+/// \brief Full Bundle implementation for protobuf messages.
+/// Contains its own Digest memory, as the protobuf message does not contain
+/// one itself.
+/// \tparam T The actual protobuf message type.
+template <typename T>
+class FullBundle final : public IBundle {
+ public:
+ using Ptr = std::unique_ptr<FullBundle<T>>;
+
+ [[nodiscard]] auto Message() const noexcept -> T const& { return msg_; }
+
+ auto Content() const& noexcept -> std::string const& final {
+ return content_;
+ }
+
+ auto Digest() const& noexcept -> bazel_re::Digest const& final {
+ return digest_;
+ }
+
+ [[nodiscard]] static auto Create(T const& msg,
+ ContentCreateFunc const& content_creator,
+ DigestCreateFunc const& digest_creator)
+ -> Ptr {
+ auto content = content_creator();
+ if (content) {
+ // create bundle with message and content
+ Ptr bundle{new FullBundle<T>{msg, std::move(*content)}};
+
+ // create digest
+ bundle->digest_ = digest_creator(bundle->content_);
+ return bundle;
+ }
+ return Ptr{};
+ }
+
+ FullBundle(FullBundle const&) = delete;
+ FullBundle(FullBundle&&) = delete;
+ auto operator=(FullBundle const&) -> FullBundle& = delete;
+ auto operator=(FullBundle &&) -> FullBundle& = delete;
+ ~FullBundle() noexcept final = default;
+
+ private:
+ T msg_{}; /**< Protobuf message */
+ bazel_re::Digest digest_{}; /**< Digest of content */
+ std::string content_{}; /**< Content the digest refers to */
+
+ explicit FullBundle(T msg, std::string&& content)
+ : msg_{std::move(msg)}, content_{std::move(content)} {}
+};
+
+using DirectoryNodeBundle = SparseBundle<bazel_re::DirectoryNode>;
+using SymlinkNodeBundle = FullBundle<bazel_re::SymlinkNode>;
+using ActionBundle = FullBundle<bazel_re::Action>;
+using CommandBundle = FullBundle<bazel_re::Command>;
+
+/// \brief Serialize protobuf message to string.
+template <class T>
+[[nodiscard]] auto SerializeMessage(T const& message) noexcept
+ -> std::optional<std::string> {
+ try {
+ std::string content(message.ByteSizeLong(), '\0');
+ message.SerializeToArray(content.data(), content.size());
+ return content;
+ } catch (...) {
+ }
+ return std::nullopt;
+}
+
+/// \brief Create protobuf message 'Platform'.
+[[nodiscard]] auto CreatePlatform(
+ std::vector<bazel_re::Platform_Property> const& props) noexcept
+ -> std::unique_ptr<bazel_re::Platform> {
+ auto platform = std::make_unique<bazel_re::Platform>();
+ std::copy(props.cbegin(),
+ props.cend(),
+ pb::back_inserter(platform->mutable_properties()));
+ return platform;
+}
+
+/// \brief Create protobuf message 'Directory'.
+[[nodiscard]] auto CreateDirectory(
+ std::vector<bazel_re::FileNode> const& files,
+ std::vector<bazel_re::DirectoryNode> const& dirs,
+ std::vector<bazel_re::SymlinkNode> const& links,
+ std::vector<bazel_re::NodeProperty> const& props) noexcept
+ -> bazel_re::Directory {
+ bazel_re::Directory dir{};
+
+ auto copy_nodes = [](auto* pb_container, auto const& nodes) {
+ pb_container->Reserve(nodes.size());
+ std::copy(nodes.begin(), nodes.end(), pb::back_inserter(pb_container));
+ std::sort(
+ pb_container->begin(),
+ pb_container->end(),
+ [](auto const& l, auto const& r) { return l.name() < r.name(); });
+ };
+
+ copy_nodes(dir.mutable_files(), files);
+ copy_nodes(dir.mutable_directories(), dirs);
+ copy_nodes(dir.mutable_symlinks(), links);
+
+ std::copy(props.cbegin(),
+ props.cend(),
+ pb::back_inserter(dir.mutable_node_properties()));
+
+ return dir;
+}
+
+/// \brief Create protobuf message 'FileNode' without digest.
+[[nodiscard]] auto CreateFileNode(
+ std::string const& file_name,
+ ObjectType type,
+ std::vector<bazel_re::NodeProperty> const& props) noexcept
+ -> bazel_re::FileNode {
+ bazel_re::FileNode node;
+ node.set_name(file_name);
+ node.set_is_executable(IsExecutableObject(type));
+ std::copy(props.cbegin(),
+ props.cend(),
+ pb::back_inserter(node.mutable_node_properties()));
+ return node;
+}
+
+/// \brief Create protobuf message 'DirectoryNode' without digest.
+[[nodiscard]] auto CreateDirectoryNode(std::string const& dir_name) noexcept
+ -> bazel_re::DirectoryNode {
+ bazel_re::DirectoryNode node;
+ node.set_name(dir_name);
+ return node;
+}
+
+/// \brief Create profobuf message FileNode from Artifact::ObjectInfo
+[[nodiscard]] auto CreateFileNodeFromObjectInfo(
+ std::string const& name,
+ Artifact::ObjectInfo const& object_info) noexcept -> bazel_re::FileNode {
+ auto file_node = CreateFileNode(name, object_info.type, {});
+
+ file_node.set_allocated_digest(gsl::owner<bazel_re::Digest*>{
+ new bazel_re::Digest{object_info.digest}});
+
+ return file_node;
+}
+
+/// \brief Create profobuf message DirectoryNode from Artifact::ObjectInfo
+[[nodiscard]] auto CreateDirectoryNodeFromObjectInfo(
+ std::string const& name,
+ Artifact::ObjectInfo const& object_info) noexcept
+ -> bazel_re::DirectoryNode {
+ auto dir_node = CreateDirectoryNode(name);
+
+ dir_node.set_allocated_digest(gsl::owner<bazel_re::Digest*>{
+ new bazel_re::Digest{object_info.digest}});
+
+ return dir_node;
+}
+
+/// \brief Create bundle for profobuf message DirectoryNode from Directory.
+[[nodiscard]] auto CreateDirectoryNodeBundle(std::string const& dir_name,
+ bazel_re::Directory const& dir)
+ -> DirectoryNodeBundle::Ptr {
+ // setup protobuf message except digest
+ auto msg = CreateDirectoryNode(dir_name);
+ auto content_creator = [&dir] { return SerializeMessage(dir); };
+ auto digest_creator = [](std::string const& content) -> bazel_re::Digest {
+ return ArtifactDigest::Create(content);
+ };
+ return DirectoryNodeBundle::Create(msg, content_creator, digest_creator);
+}
+
+/// \brief Create bundle for profobuf message Command from args strings.
+[[nodiscard]] auto CreateCommandBundle(
+ std::vector<std::string> const& args,
+ std::vector<std::string> const& output_files,
+ std::vector<std::string> const& output_dirs,
+ std::vector<bazel_re::Command_EnvironmentVariable> const& env_vars,
+ std::vector<bazel_re::Platform_Property> const& platform_properties)
+ -> CommandBundle::Ptr {
+ bazel_re::Command msg;
+ msg.set_allocated_platform(CreatePlatform(platform_properties).release());
+ std::copy(std::cbegin(args),
+ std::cend(args),
+ pb::back_inserter(msg.mutable_arguments()));
+ std::copy(std::cbegin(output_files),
+ std::cend(output_files),
+ pb::back_inserter(msg.mutable_output_files()));
+ std::copy(std::cbegin(output_dirs),
+ std::cend(output_dirs),
+ pb::back_inserter(msg.mutable_output_directories()));
+ std::copy(std::cbegin(env_vars),
+ std::cend(env_vars),
+ pb::back_inserter(msg.mutable_environment_variables()));
+
+ auto content_creator = [&msg] { return SerializeMessage(msg); };
+
+ auto digest_creator = [](std::string const& content) -> bazel_re::Digest {
+ return ArtifactDigest::Create(content);
+ };
+
+ return CommandBundle::Create(msg, content_creator, digest_creator);
+}
+
+/// \brief Create bundle for profobuf message Action from Command.
+[[nodiscard]] auto CreateActionBundle(
+ bazel_re::Digest const& command,
+ bazel_re::Digest const& root_dir,
+ std::vector<std::string> const& output_node_properties,
+ bool do_not_cache,
+ std::chrono::milliseconds const& timeout) -> ActionBundle::Ptr {
+ using seconds = std::chrono::seconds;
+ using nanoseconds = std::chrono::nanoseconds;
+ auto sec = std::chrono::duration_cast<seconds>(timeout);
+ auto nanos = std::chrono::duration_cast<nanoseconds>(timeout - sec);
+
+ auto duration = std::make_unique<google::protobuf::Duration>();
+ duration->set_seconds(sec.count());
+ duration->set_nanos(nanos.count());
+
+ bazel_re::Action msg;
+ msg.set_do_not_cache(do_not_cache);
+ msg.set_allocated_timeout(duration.release());
+ msg.set_allocated_command_digest(
+ gsl::owner<bazel_re::Digest*>{new bazel_re::Digest{command}});
+ msg.set_allocated_input_root_digest(
+ gsl::owner<bazel_re::Digest*>{new bazel_re::Digest{root_dir}});
+ std::copy(output_node_properties.cbegin(),
+ output_node_properties.cend(),
+ pb::back_inserter(msg.mutable_output_node_properties()));
+
+ auto content_creator = [&msg] { return SerializeMessage(msg); };
+
+ auto digest_creator = [](std::string const& content) -> bazel_re::Digest {
+ return ArtifactDigest::Create(content);
+ };
+
+ return ActionBundle::Create(msg, content_creator, digest_creator);
+}
+
+[[nodiscard]] auto CreateObjectInfo(bazel_re::DirectoryNode const& node)
+ -> Artifact::ObjectInfo {
+ return Artifact::ObjectInfo{ArtifactDigest{node.digest()},
+ ObjectType::Tree};
+}
+
+[[nodiscard]] auto CreateObjectInfo(bazel_re::FileNode const& node)
+ -> Artifact::ObjectInfo {
+ return Artifact::ObjectInfo{
+ ArtifactDigest{node.digest()},
+ node.is_executable() ? ObjectType::Executable : ObjectType::File};
+}
+
+class DirectoryTree;
+using DirectoryTreePtr = std::unique_ptr<DirectoryTree>;
+
+/// \brief Tree of `Artifact*` that can be converted to `DirectoryNodeBundle`.
+class DirectoryTree {
+ public:
+ /// \brief Add `Artifact*` to tree.
+ [[nodiscard]] auto AddArtifact(std::filesystem::path const& path,
+ Artifact const* artifact) -> bool {
+ auto const norm_path = path.lexically_normal();
+ if (norm_path.empty() or
+ not FileSystemManager::IsRelativePath(norm_path)) {
+ return false;
+ }
+ auto it = norm_path.begin();
+ return AddArtifact(&it, norm_path.end(), artifact);
+ }
+
+ /// \brief Convert tree to `DirectoryNodeBundle`.
+ [[nodiscard]] auto ToBundle(
+ std::string const& root_name,
+ std::optional<BazelMsgFactory::BlobStoreFunc> const& store_blob,
+ std::optional<BazelMsgFactory::InfoStoreFunc> const& store_info,
+ std::filesystem::path const& parent = "") const
+ -> DirectoryNodeBundle::Ptr {
+ std::vector<bazel_re::FileNode> file_nodes;
+ std::vector<bazel_re::DirectoryNode> dir_nodes;
+ for (auto const& [name, node] : nodes) {
+ if (std::holds_alternative<DirectoryTreePtr>(node)) {
+ auto const& dir = std::get<DirectoryTreePtr>(node);
+ auto const dir_bundle =
+ dir->ToBundle(name, store_blob, store_info, parent / name);
+ if (not dir_bundle) {
+ return nullptr;
+ }
+ dir_nodes.push_back(dir_bundle->Message());
+ if (store_blob) {
+ (*store_blob)(dir_bundle->MakeBlob());
+ }
+ }
+ else {
+ auto const& artifact = std::get<Artifact const*>(node);
+ auto const& object_info = artifact->Info();
+ if (not object_info) {
+ return nullptr;
+ }
+ if (IsTreeObject(object_info->type)) {
+ dir_nodes.push_back(
+ CreateDirectoryNodeFromObjectInfo(name, *object_info));
+ }
+ else {
+ file_nodes.push_back(
+ CreateFileNodeFromObjectInfo(name, *object_info));
+ }
+ if (store_info and
+ not(*store_info)(parent / name, *object_info)) {
+ return nullptr;
+ }
+ }
+ }
+ return CreateDirectoryNodeBundle(
+ root_name, CreateDirectory(file_nodes, dir_nodes, {}, {}));
+ }
+
+ private:
+ using Node = std::variant<DirectoryTreePtr, Artifact const*>;
+ std::unordered_map<std::string, Node> nodes;
+
+ [[nodiscard]] auto AddArtifact(std::filesystem::path::iterator* begin,
+ std::filesystem::path::iterator const& end,
+ Artifact const* artifact) -> bool {
+ auto segment = *((*begin)++);
+ if (segment == "." or segment == "..") { // fail on "." and ".."
+ return false;
+ }
+ if (*begin == end) {
+ return nodes.emplace(segment, artifact).second;
+ }
+ auto const [it, success] =
+ nodes.emplace(segment, std::make_unique<DirectoryTree>());
+ return (success or
+ std::holds_alternative<DirectoryTreePtr>(it->second)) and
+ std::get<DirectoryTreePtr>(it->second)
+ ->AddArtifact(begin, end, artifact);
+ }
+};
+
+} // namespace
+
+auto BazelMsgFactory::ReadObjectInfosFromDirectory(
+ bazel_re::Directory const& dir,
+ InfoStoreFunc const& store_info) noexcept -> bool {
+ try {
+ for (auto const& f : dir.files()) {
+ if (not store_info(f.name(), CreateObjectInfo(f))) {
+ return false;
+ }
+ }
+ for (auto const& d : dir.directories()) {
+ if (not store_info(d.name(), CreateObjectInfo(d))) {
+ return false;
+ }
+ }
+ } catch (std::exception const& ex) {
+ Logger::Log(LogLevel::Error,
+ "reading object infos from Directory failed with:\n{}",
+ ex.what());
+ return false;
+ }
+ return true;
+}
+
+auto BazelMsgFactory::CreateDirectoryDigestFromTree(
+ std::vector<DependencyGraph::NamedArtifactNodePtr> const& artifacts,
+ std::optional<BlobStoreFunc> const& store_blob,
+ std::optional<InfoStoreFunc> const& store_info)
+ -> std::optional<bazel_re::Digest> {
+ DirectoryTree build_root{};
+ for (auto const& [local_path, node] : artifacts) {
+ auto const* artifact = &node->Content();
+ if (not build_root.AddArtifact(local_path, artifact)) {
+ Logger::Log(LogLevel::Error,
+ "failed to add artifact {} ({}) to build root",
+ local_path,
+ artifact->Digest().value_or(ArtifactDigest{}).hash());
+ return std::nullopt;
+ }
+ }
+
+ auto bundle = build_root.ToBundle("", store_blob, store_info);
+ if (not bundle) {
+ return std::nullopt;
+ }
+ if (store_blob) {
+ (*store_blob)(bundle->MakeBlob());
+ }
+ return bundle->Digest();
+}
+
+auto BazelMsgFactory::CreateDirectoryDigestFromLocalTree(
+ std::filesystem::path const& root,
+ FileStoreFunc const& store_file,
+ DirStoreFunc const& store_dir) noexcept -> std::optional<bazel_re::Digest> {
+ std::vector<bazel_re::FileNode> files{};
+ std::vector<bazel_re::DirectoryNode> dirs{};
+
+ auto dir_reader = [&files, &dirs, &root, &store_file, &store_dir](
+ auto name, auto type) {
+ if (IsTreeObject(type)) {
+ // create and store sub directory
+ auto digest = CreateDirectoryDigestFromLocalTree(
+ root / name, store_file, store_dir);
+ if (not digest) {
+ return false;
+ }
+
+ auto dir = CreateDirectoryNode(name.string());
+ dir.set_allocated_digest(
+ gsl::owner<bazel_re::Digest*>{new bazel_re::Digest{*digest}});
+ dirs.emplace_back(std::move(dir));
+ return true;
+ }
+
+ // create and store file
+ try {
+ if (auto digest =
+ store_file(root / name, type == ObjectType::Executable)) {
+ auto file = CreateFileNode(name.string(), type, {});
+ file.set_allocated_digest(gsl::owner<bazel_re::Digest*>{
+ new bazel_re::Digest{std::move(*digest)}});
+ files.emplace_back(std::move(file));
+ return true;
+ }
+ } catch (std::exception const& ex) {
+ Logger::Log(
+ LogLevel::Error, "storing file failed with:\n{}", ex.what());
+ }
+ return false;
+ };
+
+ if (FileSystemManager::ReadDirectory(root, dir_reader)) {
+ auto dir = CreateDirectory(files, dirs, {}, {});
+ if (auto bytes = SerializeMessage(dir)) {
+ try {
+ if (auto digest = store_dir(*bytes, dir)) {
+ return *digest;
+ }
+ } catch (std::exception const& ex) {
+ Logger::Log(LogLevel::Error,
+ "storing directory failed with:\n{}",
+ ex.what());
+ }
+ return std::nullopt;
+ }
+ }
+ return std::nullopt;
+}
+
+auto BazelMsgFactory::CreateActionDigestFromCommandLine(
+ std::vector<std::string> const& cmdline,
+ bazel_re::Digest const& exec_dir,
+ std::vector<std::string> const& output_files,
+ std::vector<std::string> const& output_dirs,
+ std::vector<std::string> const& output_node_properties,
+ std::vector<bazel_re::Command_EnvironmentVariable> const& env_vars,
+ std::vector<bazel_re::Platform_Property> const& properties,
+ bool do_not_cache,
+ std::chrono::milliseconds const& timeout,
+ std::optional<BlobStoreFunc> const& store_blob) -> bazel_re::Digest {
+ // create command
+ auto cmd = CreateCommandBundle(
+ cmdline, output_files, output_dirs, env_vars, properties);
+
+ // create action
+ auto action = CreateActionBundle(
+ cmd->Digest(), exec_dir, output_node_properties, do_not_cache, timeout);
+
+ if (store_blob) {
+ (*store_blob)(cmd->MakeBlob());
+ (*store_blob)(action->MakeBlob());
+ }
+
+ return action->Digest();
+}
+
+auto BazelMsgFactory::DirectoryToString(bazel_re::Directory const& dir) noexcept
+ -> std::optional<std::string> {
+ auto json = nlohmann::json::object();
+ try {
+ if (not BazelMsgFactory::ReadObjectInfosFromDirectory(
+ dir, [&json](auto path, auto info) {
+ json[path.string()] = info.ToString();
+ return true;
+ })) {
+ Logger::Log(LogLevel::Error,
+ "reading object infos from Directory failed");
+ return std::nullopt;
+ }
+ return json.dump(2);
+ } catch (std::exception const& ex) {
+ Logger::Log(LogLevel::Error,
+ "dumping Directory to string failed with:\n{}",
+ ex.what());
+ return std::nullopt;
+ }
+}
diff --git a/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp b/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp
new file mode 100644
index 00000000..d850e6c6
--- /dev/null
+++ b/src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp
@@ -0,0 +1,128 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_BAZEL_MSG_BAZEL_MSG_FACTORY_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_BAZEL_MSG_BAZEL_MSG_FACTORY_HPP
+
+#include <chrono>
+#include <filesystem>
+#include <functional>
+#include <memory>
+#include <optional>
+#include <string>
+#include <vector>
+
+#include "src/buildtool/common/artifact.hpp"
+#include "src/buildtool/common/artifact_digest.hpp"
+#include "src/buildtool/common/bazel_types.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_blob.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_common.hpp"
+#include "src/buildtool/execution_engine/dag/dag.hpp"
+
+/// \brief Factory for creating Bazel API protobuf messages.
+/// Responsible for creating protobuf messages necessary for Bazel API server
+/// communication.
+class BazelMsgFactory {
+ public:
+ using BlobStoreFunc = std::function<void(BazelBlob&&)>;
+ using InfoStoreFunc = std::function<bool(std::filesystem::path const&,
+ Artifact::ObjectInfo const&)>;
+ using FileStoreFunc = std::function<
+ std::optional<bazel_re::Digest>(std::filesystem::path const&, bool)>;
+ using DirStoreFunc = std::function<std::optional<bazel_re::Digest>(
+ std::string const&,
+ bazel_re::Directory const&)>;
+
+ /// \brief Read object infos from directory.
+ /// \returns true on success.
+ [[nodiscard]] static auto ReadObjectInfosFromDirectory(
+ bazel_re::Directory const& dir,
+ InfoStoreFunc const& store_info) noexcept -> bool;
+
+ /// \brief Create Directory digest from artifact tree structure.
+ /// Recursively traverse entire tree and create blobs for sub-directories.
+ /// \param artifacts Artifact tree structure.
+ /// \param store_blob Function for storing Directory blobs.
+ /// \param store_info Function for storing object infos.
+ /// \returns Digest representing the entire tree.
+ [[nodiscard]] static auto CreateDirectoryDigestFromTree(
+ std::vector<DependencyGraph::NamedArtifactNodePtr> const& artifacts,
+ std::optional<BlobStoreFunc> const& store_blob = std::nullopt,
+ std::optional<InfoStoreFunc> const& store_info = std::nullopt)
+ -> std::optional<bazel_re::Digest>;
+
+ /// \brief Create Directory digest from local file root.
+ /// Recursively traverse entire root and store files and directories.
+ /// \param root Path to local file root.
+ /// \param store_file Function for storing local file via path.
+ /// \param store_dir Function for storing Directory blobs.
+ /// \returns Digest representing the entire file root.
+ [[nodiscard]] static auto CreateDirectoryDigestFromLocalTree(
+ std::filesystem::path const& root,
+ FileStoreFunc const& store_file,
+ DirStoreFunc const& store_dir) noexcept
+ -> std::optional<bazel_re::Digest>;
+
+ /// \brief Creates Action digest from command line.
+ /// As part of the internal process, it creates an ActionBundle and
+ /// CommandBundle that can be captured via BlobStoreFunc.
+ /// \param[in] cmdline The command line.
+ /// \param[in] exec_dir The Digest of the execution directory.
+ /// \param[in] output_files The paths of output files.
+ /// \param[in] output_dirs The paths of output directories.
+ /// \param[in] output_node. The output node's properties.
+ /// \param[in] env_vars The environment variables set.
+ /// \param[in] properties The target platform's properties.
+ /// \param[in] do_not_cache Skip action cache.
+ /// \param[in] timeout The command execution timeout.
+ /// \param[in] store_blob Function for storing action and cmd bundles.
+ /// \returns Digest representing the action.
+ [[nodiscard]] static auto CreateActionDigestFromCommandLine(
+ std::vector<std::string> const& cmdline,
+ bazel_re::Digest const& exec_dir,
+ std::vector<std::string> const& output_files,
+ std::vector<std::string> const& output_dirs,
+ std::vector<std::string> const& output_node_properties,
+ std::vector<bazel_re::Command_EnvironmentVariable> const& env_vars,
+ std::vector<bazel_re::Platform_Property> const& properties,
+ bool do_not_cache,
+ std::chrono::milliseconds const& timeout,
+ std::optional<BlobStoreFunc> const& store_blob = std::nullopt)
+ -> bazel_re::Digest;
+
+ /// \brief Create descriptive string from Directory protobuf message.
+ [[nodiscard]] static auto DirectoryToString(
+ bazel_re::Directory const& dir) noexcept -> std::optional<std::string>;
+
+ /// \brief Create message vector from std::map.
+ /// \param[in] input map
+ /// \tparam T protobuf message type. It must be a name-value
+ /// message (i.e. class methods T::set_name(std::string) and
+ /// T::set_value(std::string) must exist)
+ template <class T>
+ [[nodiscard]] static auto CreateMessageVectorFromMap(
+ std::map<std::string, std::string> const& input) noexcept
+ -> std::vector<T> {
+ std::vector<T> output{};
+ std::transform(std::begin(input),
+ std::end(input),
+ std::back_inserter(output),
+ [](auto const& key_val) {
+ T msg;
+ msg.set_name(key_val.first);
+ msg.set_value(key_val.second);
+ return msg;
+ });
+ return output;
+ }
+
+ template <class T>
+ [[nodiscard]] static auto MessageFromString(std::string const& blob)
+ -> std::optional<T> {
+ T msg{};
+ if (msg.ParseFromString(blob)) {
+ return msg;
+ }
+ Logger::Log(LogLevel::Error, "failed to parse message from string");
+ return std::nullopt;
+ }
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_BAZEL_MSG_BAZEL_MSG_FACTORY_HPP
diff --git a/src/buildtool/execution_api/common/TARGETS b/src/buildtool/execution_api/common/TARGETS
new file mode 100644
index 00000000..aa3ad0bd
--- /dev/null
+++ b/src/buildtool/execution_api/common/TARGETS
@@ -0,0 +1,22 @@
+{ "common":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["common"]
+ , "hdrs":
+ [ "execution_common.hpp"
+ , "execution_api.hpp"
+ , "execution_action.hpp"
+ , "execution_response.hpp"
+ , "local_tree_map.hpp"
+ ]
+ , "deps":
+ [ ["@", "gsl-lite", "", "gsl-lite"]
+ , ["src/buildtool/common", "common"]
+ , ["src/buildtool/crypto", "hash_generator"]
+ , ["src/buildtool/file_system", "object_type"]
+ , ["src/buildtool/execution_api/bazel_msg", "bazel_msg"]
+ , ["src/buildtool/execution_api/bazel_msg", "bazel_msg_factory"]
+ , ["src/utils/cpp", "hex_string"]
+ ]
+ , "stage": ["src", "buildtool", "execution_api", "common"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/execution_api/common/execution_action.hpp b/src/buildtool/execution_api/common/execution_action.hpp
new file mode 100644
index 00000000..58176bda
--- /dev/null
+++ b/src/buildtool/execution_api/common/execution_action.hpp
@@ -0,0 +1,58 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_REMOTE_EXECUTION_ACTION_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_REMOTE_EXECUTION_ACTION_HPP
+
+#include <chrono>
+#include <memory>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/execution_api/common/execution_response.hpp"
+
+class Logger;
+class ExecutionArtifactContainer;
+
+/// \brief Abstract action.
+/// Can execute multiple commands. Commands are executed in arbitrary order and
+/// cannot depend on each other.
+class IExecutionAction {
+ public:
+ using Ptr = std::unique_ptr<IExecutionAction>;
+
+ enum class CacheFlag {
+ CacheOutput, ///< run and cache, or serve from cache
+ DoNotCacheOutput, ///< run and do not cache, never served from cached
+ FromCacheOnly, ///< do not run, only serve from cache
+ PretendCached ///< always run, respond same action id as if cached
+ };
+
+ static constexpr std::chrono::milliseconds kDefaultTimeout{1000};
+
+ [[nodiscard]] static constexpr auto CacheEnabled(CacheFlag f) -> bool {
+ return f == CacheFlag::CacheOutput or f == CacheFlag::FromCacheOnly;
+ }
+
+ [[nodiscard]] static constexpr auto ExecutionEnabled(CacheFlag f) -> bool {
+ return f == CacheFlag::CacheOutput or
+ f == CacheFlag::DoNotCacheOutput or
+ f == CacheFlag::PretendCached;
+ }
+
+ IExecutionAction() = default;
+ IExecutionAction(IExecutionAction const&) = delete;
+ IExecutionAction(IExecutionAction&&) = delete;
+ auto operator=(IExecutionAction const&) -> IExecutionAction& = delete;
+ auto operator=(IExecutionAction &&) -> IExecutionAction& = delete;
+ virtual ~IExecutionAction() = default;
+
+ /// \brief Execute the action.
+ /// \returns Execution response, with commands' outputs and artifacts.
+ /// \returns nullptr if execution failed.
+ // NOLINTNEXTLINE(google-default-arguments)
+ [[nodiscard]] virtual auto Execute(Logger const* logger = nullptr) noexcept
+ -> IExecutionResponse::Ptr = 0;
+
+ virtual void SetCacheFlag(CacheFlag flag) noexcept = 0;
+
+ virtual void SetTimeout(std::chrono::milliseconds timeout) noexcept = 0;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_REMOTE_EXECUTION_ACTION_HPP
diff --git a/src/buildtool/execution_api/common/execution_api.hpp b/src/buildtool/execution_api/common/execution_api.hpp
new file mode 100644
index 00000000..92002d48
--- /dev/null
+++ b/src/buildtool/execution_api/common/execution_api.hpp
@@ -0,0 +1,78 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_EXECUTION_APIHPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_EXECUTION_APIHPP
+
+#include <map>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/common/artifact.hpp" // Artifact::ObjectInfo
+#include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp"
+#include "src/buildtool/execution_api/common/execution_action.hpp"
+
+/// \brief Abstract remote execution API
+/// Can be used to create actions.
+class IExecutionApi {
+ public:
+ using Ptr = std::unique_ptr<IExecutionApi>;
+
+ IExecutionApi() = default;
+ IExecutionApi(IExecutionApi const&) = delete;
+ IExecutionApi(IExecutionApi&&) = default;
+ auto operator=(IExecutionApi const&) -> IExecutionApi& = delete;
+ auto operator=(IExecutionApi &&) -> IExecutionApi& = default;
+ virtual ~IExecutionApi() = default;
+
+ /// \brief Create a new action.
+ /// \param[in] root_digest Digest of the build root.
+ /// \param[in] command Command as argv vector
+ /// \param[in] output_files List of paths to output files.
+ /// \param[in] output_dirs List of paths to output directories.
+ /// \param[in] env_vars The environment variables to set.
+ /// \param[in] properties Platform properties to set.
+ /// \returns The new action.
+ [[nodiscard]] virtual auto CreateAction(
+ ArtifactDigest const& root_digest,
+ std::vector<std::string> const& command,
+ std::vector<std::string> const& output_files,
+ std::vector<std::string> const& output_dirs,
+ std::map<std::string, std::string> const& env_vars,
+ std::map<std::string, std::string> const& properties) noexcept
+ -> IExecutionAction::Ptr = 0;
+
+ /// \brief Retrieve artifacts from CAS and store to specified paths.
+ /// Tree artifacts are resolved its containing file artifacts are
+ /// recursively retrieved.
+ [[nodiscard]] virtual auto RetrieveToPaths(
+ std::vector<Artifact::ObjectInfo> const& artifacts_info,
+ std::vector<std::filesystem::path> const& output_paths) noexcept
+ -> bool = 0;
+
+ /// \brief Retrieve artifacts from CAS and write to file descriptors.
+ /// Tree artifacts are not resolved and instead the raw protobuf message
+ /// will be written to fd.
+ [[nodiscard]] virtual auto RetrieveToFds(
+ std::vector<Artifact::ObjectInfo> const& artifacts_info,
+ std::vector<int> const& fds) noexcept -> bool = 0;
+
+ /// \brief Upload blobs to CAS. Uploads only the blobs that are not yet
+ /// available in CAS, unless `skip_find_missing` is specified.
+ /// \param blobs Container of blobs to upload.
+ /// \param skip_find_missing Skip finding missing blobs, just upload all.
+ /// NOLINTNEXTLINE(google-default-arguments)
+ [[nodiscard]] virtual auto Upload(BlobContainer const& blobs,
+ bool skip_find_missing = false) noexcept
+ -> bool = 0;
+
+ [[nodiscard]] virtual auto UploadTree(
+ std::vector<DependencyGraph::NamedArtifactNodePtr> const&
+ artifacts) noexcept -> std::optional<ArtifactDigest> = 0;
+
+ [[nodiscard]] virtual auto IsAvailable(
+ ArtifactDigest const& digest) const noexcept -> bool = 0;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_EXECUTION_APIHPP
diff --git a/src/buildtool/execution_api/common/execution_common.hpp b/src/buildtool/execution_api/common/execution_common.hpp
new file mode 100644
index 00000000..8b6aea40
--- /dev/null
+++ b/src/buildtool/execution_api/common/execution_common.hpp
@@ -0,0 +1,109 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_EXECUTION_COMMON_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_EXECUTION_COMMON_HPP
+
+#ifdef __unix__
+#include <sys/types.h>
+#include <unistd.h>
+#else
+#error "Non-unix is not supported yet"
+#endif
+
+#include <array>
+#include <filesystem>
+#include <optional>
+#include <random>
+#include <sstream>
+#include <string>
+#include <thread>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/crypto/hash_generator.hpp"
+#include "src/buildtool/logging/logger.hpp"
+#include "src/utils/cpp/hex_string.hpp"
+
+/// \brief Create unique ID for current process and thread.
+[[nodiscard]] static inline auto CreateProcessUniqueId() noexcept
+ -> std::optional<std::string> {
+#ifdef __unix__
+ pid_t pid{};
+ try {
+ pid = getpid();
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error, e.what());
+ return std::nullopt;
+ }
+#endif
+ auto tid = std::this_thread::get_id();
+ std::ostringstream id{};
+ id << pid << "-" << tid;
+ return id.str();
+}
+
+/// \brief Create unique path based on file_path.
+[[nodiscard]] static inline auto CreateUniquePath(
+ std::filesystem::path file_path) noexcept
+ -> std::optional<std::filesystem::path> {
+ auto id = CreateProcessUniqueId();
+ if (id) {
+ return file_path.concat("." + *id);
+ }
+ return std::nullopt;
+}
+
+[[nodiscard]] static auto GetNonDeterministicRandomNumber() -> unsigned int {
+ std::uniform_int_distribution<unsigned int> dist{};
+ std::random_device urandom{
+#ifdef __unix__
+ "/dev/urandom"
+#endif
+ };
+ return dist(urandom);
+}
+
+static auto kRandomConstant = GetNonDeterministicRandomNumber();
+
+static void EncodeUUIDVersion4(std::string* uuid) {
+ constexpr auto kVersionByte = 6UL;
+ constexpr auto kVersionBits = 0x40U; // version 4: 0100 xxxx
+ constexpr auto kClearMask = 0x0fU;
+ gsl_Expects(uuid->size() >= kVersionByte);
+ auto& byte = uuid->at(kVersionByte);
+ byte = static_cast<char>(kVersionBits |
+ (kClearMask & static_cast<std::uint8_t>(byte)));
+}
+
+static void EncodeUUIDVariant1(std::string* uuid) {
+ constexpr auto kVariantByte = 8UL;
+ constexpr auto kVariantBits = 0x80U; // variant 1: 10xx xxxx
+ constexpr auto kClearMask = 0x3fU;
+ gsl_Expects(uuid->size() >= kVariantByte);
+ auto& byte = uuid->at(kVariantByte);
+ byte = static_cast<char>(kVariantBits |
+ (kClearMask & static_cast<std::uint8_t>(byte)));
+}
+
+/// \brief Create UUID version 4 from seed.
+[[nodiscard]] static inline auto CreateUUIDVersion4(std::string const& seed)
+ -> std::string {
+ constexpr auto kRawLength = 16UL;
+ constexpr auto kHexDashPos = std::array{8UL, 12UL, 16UL, 20UL};
+
+ auto value = fmt::format("{}-{}", std::to_string(kRandomConstant), seed);
+ auto uuid = HashGenerator{HashGenerator::HashType::SHA1}.Run(value).Bytes();
+ EncodeUUIDVersion4(&uuid);
+ EncodeUUIDVariant1(&uuid);
+ gsl_Expects(uuid.size() >= kRawLength);
+
+ std::size_t cur{};
+ std::ostringstream ss{};
+ auto uuid_hex = ToHexString(uuid.substr(0, kRawLength));
+ for (auto pos : kHexDashPos) {
+ ss << uuid_hex.substr(cur, pos - cur) << '-';
+ cur = pos;
+ }
+ ss << uuid_hex.substr(cur);
+ gsl_EnsuresAudit(ss.str().size() == (2 * kRawLength) + kHexDashPos.size());
+ return ss.str();
+}
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_EXECUTION_COMMON_HPP
diff --git a/src/buildtool/execution_api/common/execution_response.hpp b/src/buildtool/execution_api/common/execution_response.hpp
new file mode 100644
index 00000000..76349018
--- /dev/null
+++ b/src/buildtool/execution_api/common/execution_response.hpp
@@ -0,0 +1,48 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_REMOTE_EXECUTION_RESPONSE_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_REMOTE_EXECUTION_RESPONSE_HPP
+
+#include <memory>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/common/artifact.hpp"
+
+/// \brief Abstract response.
+/// Response of an action execution. Contains outputs from multiple commands and
+/// a single container with artifacts.
+class IExecutionResponse {
+ public:
+ using Ptr = std::unique_ptr<IExecutionResponse>;
+ using ArtifactInfos = std::unordered_map<std::string, Artifact::ObjectInfo>;
+
+ enum class StatusCode { Failed, Success };
+
+ IExecutionResponse() = default;
+ IExecutionResponse(IExecutionResponse const&) = delete;
+ IExecutionResponse(IExecutionResponse&&) = delete;
+ auto operator=(IExecutionResponse const&) -> IExecutionResponse& = delete;
+ auto operator=(IExecutionResponse &&) -> IExecutionResponse& = delete;
+ virtual ~IExecutionResponse() = default;
+
+ [[nodiscard]] virtual auto Status() const noexcept -> StatusCode = 0;
+
+ [[nodiscard]] virtual auto ExitCode() const noexcept -> int = 0;
+
+ [[nodiscard]] virtual auto IsCached() const noexcept -> bool = 0;
+
+ [[nodiscard]] virtual auto HasStdErr() const noexcept -> bool = 0;
+
+ [[nodiscard]] virtual auto HasStdOut() const noexcept -> bool = 0;
+
+ [[nodiscard]] virtual auto StdErr() noexcept -> std::string = 0;
+
+ [[nodiscard]] virtual auto StdOut() noexcept -> std::string = 0;
+
+ [[nodiscard]] virtual auto ActionDigest() const noexcept -> std::string = 0;
+
+ [[nodiscard]] virtual auto Artifacts() const noexcept -> ArtifactInfos = 0;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_REMOTE_EXECUTION_RESPONSE_HPP
diff --git a/src/buildtool/execution_api/common/local_tree_map.hpp b/src/buildtool/execution_api/common/local_tree_map.hpp
new file mode 100644
index 00000000..77de2d53
--- /dev/null
+++ b/src/buildtool/execution_api/common/local_tree_map.hpp
@@ -0,0 +1,140 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_LOCAL_TREE_MAP_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_LOCAL_TREE_MAP_HPP
+
+#include <filesystem>
+#include <shared_mutex>
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/common/artifact.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+/// \brief Maps digest of `bazel_re::Directory` to `LocalTree`.
+class LocalTreeMap {
+ /// \brief Thread-safe pool of unique object infos.
+ class ObjectInfoPool {
+ public:
+ /// Get pointer to stored info, or a add new one and return its pointer.
+ [[nodiscard]] auto GetOrAdd(Artifact::ObjectInfo const& info)
+ -> Artifact::ObjectInfo const* {
+ { // get
+ std::shared_lock lock{mutex_};
+ auto it = infos_.find(info);
+ if (it != infos_.end()) {
+ return &(*it);
+ }
+ }
+ { // or add
+ std::unique_lock lock{mutex_};
+ return &(*infos_.emplace(info).first);
+ }
+ }
+
+ private:
+ std::unordered_set<Artifact::ObjectInfo> infos_;
+ mutable std::shared_mutex mutex_;
+ };
+
+ public:
+ /// \brief Maps blob locations to object infos.
+ class LocalTree {
+ friend class LocalTreeMap;
+
+ public:
+ /// \brief Add a new path and info pair to the tree.
+ /// Path must not be absolute, empty, or contain dot-segments.
+ /// \param path The location to add the object info.
+ /// \param info The object info to add.
+ /// \returns true if successfully inserted or info existed before.
+ [[nodiscard]] auto AddInfo(std::filesystem::path const& path,
+ Artifact::ObjectInfo const& info) noexcept
+ -> bool {
+ auto norm_path = path.lexically_normal();
+ if (norm_path.is_absolute() or norm_path.empty() or
+ *norm_path.begin() == "..") {
+ Logger::Log(LogLevel::Error,
+ "cannot add malformed path to local tree: {}",
+ path.string());
+ return false;
+ }
+ try {
+ if (entries_.contains(norm_path.string())) {
+ return true;
+ }
+ if (auto const* info_ptr = infos_->GetOrAdd(info)) {
+ entries_.emplace(norm_path.string(), info_ptr);
+ return true;
+ }
+ } catch (std::exception const& ex) {
+ Logger::Log(LogLevel::Error,
+ "adding object info to tree failed with:\n{}",
+ ex.what());
+ }
+ return false;
+ }
+
+ [[nodiscard]] auto size() const noexcept { return entries_.size(); }
+ [[nodiscard]] auto begin() const noexcept { return entries_.begin(); }
+ [[nodiscard]] auto end() const noexcept { return entries_.end(); }
+
+ private:
+ gsl::not_null<ObjectInfoPool*> infos_;
+ std::unordered_map<std::string,
+ gsl::not_null<Artifact::ObjectInfo const*>>
+ entries_{};
+
+ explicit LocalTree(gsl::not_null<ObjectInfoPool*> infos) noexcept
+ : infos_{std::move(infos)} {}
+ };
+
+ /// \brief Create a new `LocalTree` object.
+ [[nodiscard]] auto CreateTree() noexcept -> LocalTree {
+ return LocalTree{&infos_};
+ }
+
+ /// \brief Get pointer to existing `LocalTree` object.
+ /// \param root_digest The root digest of the tree to lookup.
+ /// \returns nullptr if no tree was found for given root digest.
+ [[nodiscard]] auto GetTree(bazel_re::Digest const& root_digest)
+ const noexcept -> LocalTree const* {
+ std::shared_lock lock{mutex_};
+ auto it = trees_.find(root_digest);
+ return (it != trees_.end()) ? &(it->second) : nullptr;
+ }
+
+ /// \brief Checks if entry for root digest exists.
+ [[nodiscard]] auto HasTree(
+ bazel_re::Digest const& root_digest) const noexcept -> bool {
+ return GetTree(root_digest) != nullptr;
+ }
+
+ /// \brief Add new `LocalTree` for given root digest. Does not overwrite if
+ /// a tree for the given root digest already exists.
+ /// \param root_digest The root digest to add the new tree for.
+ /// \param tree The new tree to add.
+ /// \returns true if the tree was successfully added or existed before.
+ [[nodiscard]] auto AddTree(bazel_re::Digest const& root_digest,
+ LocalTree&& tree) noexcept -> bool {
+ if (not HasTree(root_digest)) {
+ try {
+ std::unique_lock lock{mutex_};
+ trees_.emplace(root_digest, std::move(tree));
+ } catch (std::exception const& ex) {
+ Logger::Log(LogLevel::Error,
+ "adding local tree to tree map failed with:\n{}",
+ ex.what());
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private:
+ ObjectInfoPool infos_; // pool to store each solid object info exactly once
+ std::unordered_map<bazel_re::Digest, LocalTree> trees_;
+ mutable std::shared_mutex mutex_;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_COMMON_LOCAL_TREE_MAP_HPP
diff --git a/src/buildtool/execution_api/local/TARGETS b/src/buildtool/execution_api/local/TARGETS
new file mode 100644
index 00000000..b3e54597
--- /dev/null
+++ b/src/buildtool/execution_api/local/TARGETS
@@ -0,0 +1,36 @@
+{ "config":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["config"]
+ , "hdrs": ["config.hpp"]
+ , "deps":
+ [ ["src/buildtool/logging", "logging"]
+ , ["src/buildtool/file_system", "file_system_manager"]
+ ]
+ , "stage": ["src", "buildtool", "execution_api", "local"]
+ }
+, "local":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["local"]
+ , "hdrs":
+ [ "file_storage.hpp"
+ , "local_api.hpp"
+ , "local_action.hpp"
+ , "local_response.hpp"
+ , "local_storage.hpp"
+ , "local_cas.hpp"
+ , "local_ac.hpp"
+ ]
+ , "srcs": ["local_action.cpp", "local_storage.cpp"]
+ , "deps":
+ [ ["@", "gsl-lite", "", "gsl-lite"]
+ , "config"
+ , ["src/buildtool/execution_api/common", "common"]
+ , ["src/buildtool/execution_api/bazel_msg", "bazel_msg_factory"]
+ , ["src/buildtool/file_system", "file_system_manager"]
+ , ["src/buildtool/file_system", "system_command"]
+ , ["src/buildtool/file_system", "object_type"]
+ , ["src/buildtool/logging", "logging"]
+ ]
+ , "stage": ["src", "buildtool", "execution_api", "local"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/execution_api/local/config.hpp b/src/buildtool/execution_api/local/config.hpp
new file mode 100644
index 00000000..5f3a2a80
--- /dev/null
+++ b/src/buildtool/execution_api/local/config.hpp
@@ -0,0 +1,137 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_CONFIG_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_CONFIG_HPP
+
+#ifdef __unix__
+#include <pwd.h>
+#include <sys/types.h>
+#include <unistd.h>
+#else
+#error "Non-unix is not supported yet"
+#endif
+
+#include <filesystem>
+#include <functional>
+#include <string>
+#include <vector>
+
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+/// \brief Store global build system configuration.
+class LocalExecutionConfig {
+ public:
+ [[nodiscard]] static auto SetBuildRoot(
+ std::filesystem::path const& dir) noexcept -> bool {
+ if (FileSystemManager::IsRelativePath(dir)) {
+ Logger::Log(LogLevel::Error,
+ "Build root must be absolute path but got '{}'.",
+ dir.string());
+ return false;
+ }
+ build_root_ = dir;
+ return true;
+ }
+
+ [[nodiscard]] static auto SetDiskCache(
+ std::filesystem::path const& dir) noexcept -> bool {
+ if (FileSystemManager::IsRelativePath(dir)) {
+ Logger::Log(LogLevel::Error,
+ "Disk cache must be absolute path but got '{}'.",
+ dir.string());
+ return false;
+ }
+ disk_cache_ = dir;
+ return true;
+ }
+
+ [[nodiscard]] static auto SetLauncher(
+ std::vector<std::string> const& launcher) noexcept -> bool {
+ try {
+ launcher_ = launcher;
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error,
+ "when setting the local launcher\n{}",
+ e.what());
+ return false;
+ }
+ return true;
+ }
+
+ [[nodiscard]] static auto SetKeepBuildDir(bool is_persistent) noexcept
+ -> bool {
+ keep_build_dir_ = is_persistent;
+ return true;
+ }
+
+ /// \brief User directory.
+ [[nodiscard]] static auto GetUserDir() noexcept -> std::filesystem::path {
+ if (user_root_.empty()) {
+ user_root_ = GetUserRoot() / ".cache" / "just";
+ }
+ return user_root_;
+ }
+
+ /// \brief Build directory, defaults to user directory if not set
+ [[nodiscard]] static auto GetBuildDir() noexcept -> std::filesystem::path {
+ if (build_root_.empty()) {
+ return GetUserDir();
+ }
+ return build_root_;
+ }
+
+ /// \brief Cache directory, defaults to user directory if not set
+ [[nodiscard]] static auto GetCacheDir() noexcept -> std::filesystem::path {
+ if (disk_cache_.empty()) {
+ return GetBuildDir();
+ }
+ return disk_cache_;
+ }
+
+ [[nodiscard]] static auto GetLauncher() noexcept
+ -> std::vector<std::string> {
+ return launcher_;
+ }
+
+ [[nodiscard]] static auto KeepBuildDir() noexcept -> bool {
+ return keep_build_dir_;
+ }
+
+ private:
+ // User root directory (Unix default: /home/${USER})
+ static inline std::filesystem::path user_root_{};
+
+ // Build root directory (default: empty)
+ static inline std::filesystem::path build_root_{};
+
+ // Disk cache directory (default: empty)
+ static inline std::filesystem::path disk_cache_{};
+
+ // Launcher to be prepended to action's command before executed.
+ // Default: ["env", "--"]
+ static inline std::vector<std::string> launcher_{"env", "--"};
+
+ // Persistent build directory option
+ static inline bool keep_build_dir_{false};
+
+ /// \brief Determine user root directory
+ [[nodiscard]] static inline auto GetUserRoot() noexcept
+ -> std::filesystem::path {
+ char const* root{nullptr};
+
+#ifdef __unix__
+ root = std::getenv("HOME");
+ if (root == nullptr) {
+ root = getpwuid(getuid())->pw_dir;
+ }
+#endif
+
+ if (root == nullptr) {
+ Logger::Log(LogLevel::Error, "Cannot determine user directory.");
+ std::exit(EXIT_FAILURE);
+ }
+
+ return root;
+ }
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_CONFIG_HPP
diff --git a/src/buildtool/execution_api/local/file_storage.hpp b/src/buildtool/execution_api/local/file_storage.hpp
new file mode 100644
index 00000000..07ac1204
--- /dev/null
+++ b/src/buildtool/execution_api/local/file_storage.hpp
@@ -0,0 +1,107 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_FILE_STORAGE_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_FILE_STORAGE_HPP
+
+#include <filesystem>
+#include <string>
+
+#include "src/buildtool/execution_api/common/execution_common.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+
+enum class StoreMode {
+ // First thread to write conflicting file wins.
+ FirstWins,
+ // Last thread to write conflicting file wins, effectively overwriting
+ // existing entries. NOTE: This might cause races if hard linking from
+ // stored files due to an issue with the interaction of rename(2) and
+ // link(2) (see: https://stackoverflow.com/q/69076026/1107763).
+ LastWins
+};
+
+template <ObjectType kType = ObjectType::File,
+ StoreMode kMode = StoreMode::FirstWins>
+class FileStorage {
+ public:
+ explicit FileStorage(std::filesystem::path storage_root) noexcept
+ : storage_root_{std::move(storage_root)} {}
+
+ /// \brief Add file to storage.
+ /// \returns true if file exists afterward.
+ [[nodiscard]] auto AddFromFile(
+ std::string const& id,
+ std::filesystem::path const& source_path) const noexcept -> bool {
+ return AtomicAdd(id, source_path);
+ }
+
+ /// \brief Add bytes to storage.
+ /// \returns true if file exists afterward.
+ [[nodiscard]] auto AddFromBytes(std::string const& id,
+ std::string const& bytes) const noexcept
+ -> bool {
+ return AtomicAdd(id, bytes);
+ }
+
+ [[nodiscard]] auto GetPath(std::string const& name) const noexcept
+ -> std::filesystem::path {
+ return storage_root_ / name;
+ }
+
+ private:
+ std::filesystem::path const storage_root_{};
+
+ /// \brief Add file to storage via copy and atomic rename.
+ /// If a race-condition occurs, the winning thread will be the one
+ /// performing the rename operation first or last, depending on kMode being
+ /// set to FirstWins or LastWins, respectively. All threads will signal
+ /// success.
+ /// \returns true if file exists afterward.
+ template <class T>
+ [[nodiscard]] auto AtomicAdd(std::string const& id,
+ T const& data) const noexcept -> bool {
+ auto file_path = storage_root_ / id;
+ if (kMode == StoreMode::LastWins or
+ not FileSystemManager::Exists(file_path)) {
+ auto unique_path = CreateUniquePath(file_path);
+ if (unique_path and
+ FileSystemManager::CreateDirectory(file_path.parent_path()) and
+ CreateFileFromData(*unique_path, data) and
+ StageFile(*unique_path, file_path)) {
+ Logger::Log(
+ LogLevel::Trace, "created entry {}.", file_path.string());
+ return true;
+ }
+ }
+ return FileSystemManager::IsFile(file_path);
+ }
+
+ /// \brief Create file from file path.
+ [[nodiscard]] static auto CreateFileFromData(
+ std::filesystem::path const& file_path,
+ std::filesystem::path const& other_path) noexcept -> bool {
+ return FileSystemManager::CopyFileAs<kType>(other_path, file_path);
+ }
+
+ /// \brief Create file from bytes.
+ [[nodiscard]] static auto CreateFileFromData(
+ std::filesystem::path const& file_path,
+ std::string const& bytes) noexcept -> bool {
+ return FileSystemManager::WriteFileAs<kType>(bytes, file_path);
+ }
+
+ /// \brief Stage file from source path to target path.
+ [[nodiscard]] static auto StageFile(
+ std::filesystem::path const& src_path,
+ std::filesystem::path const& dst_path) noexcept -> bool {
+ switch (kMode) {
+ case StoreMode::FirstWins:
+ // try rename source or delete it if the target already exists
+ return FileSystemManager::Rename(
+ src_path, dst_path, /*no_clobber=*/true) or
+ (FileSystemManager::IsFile(dst_path) and
+ FileSystemManager::RemoveFile(src_path));
+ case StoreMode::LastWins:
+ return FileSystemManager::Rename(src_path, dst_path);
+ }
+ }
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_FILE_STORAGE_HPP
diff --git a/src/buildtool/execution_api/local/local_ac.hpp b/src/buildtool/execution_api/local/local_ac.hpp
new file mode 100644
index 00000000..f319940a
--- /dev/null
+++ b/src/buildtool/execution_api/local/local_ac.hpp
@@ -0,0 +1,82 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_AC_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_AC_HPP
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/common/bazel_types.hpp"
+#include "src/buildtool/execution_api/common/execution_common.hpp"
+#include "src/buildtool/execution_api/local/config.hpp"
+#include "src/buildtool/execution_api/local/file_storage.hpp"
+#include "src/buildtool/execution_api/local/local_cas.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+class LocalAC {
+ public:
+ explicit LocalAC(gsl::not_null<LocalCAS<ObjectType::File>*> cas) noexcept
+ : cas_{std::move(cas)} {};
+
+ LocalAC(gsl::not_null<LocalCAS<ObjectType::File>*> cas,
+ std::filesystem::path cache_root) noexcept
+ : cas_{std::move(cas)}, cache_root_{std::move(cache_root)} {}
+
+ LocalAC(LocalAC const&) = delete;
+ LocalAC(LocalAC&&) = delete;
+ auto operator=(LocalAC const&) -> LocalAC& = delete;
+ auto operator=(LocalAC &&) -> LocalAC& = delete;
+ ~LocalAC() noexcept = default;
+
+ [[nodiscard]] auto StoreResult(
+ bazel_re::Digest const& action_id,
+ bazel_re::ActionResult const& result) const noexcept -> bool {
+ auto bytes = result.SerializeAsString();
+ auto digest = cas_->StoreBlobFromBytes(bytes);
+ return (digest and file_store_.AddFromBytes(
+ action_id.hash(), digest->SerializeAsString()));
+ }
+
+ [[nodiscard]] auto CachedResult(bazel_re::Digest const& action_id)
+ const noexcept -> std::optional<bazel_re::ActionResult> {
+ auto entry_path = file_store_.GetPath(action_id.hash());
+ bazel_re::Digest digest{};
+ auto const entry =
+ FileSystemManager::ReadFile(entry_path, ObjectType::File);
+ if (not entry.has_value()) {
+ logger_.Emit(LogLevel::Debug,
+ "Cache miss, entry not found {}",
+ entry_path.string());
+ return std::nullopt;
+ }
+ if (not digest.ParseFromString(*entry)) {
+ logger_.Emit(LogLevel::Warning,
+ "Parsing cache entry failed failed for action {}",
+ action_id.hash());
+ return std::nullopt;
+ }
+ auto src_path = cas_->BlobPath(digest);
+ bazel_re::ActionResult result{};
+ if (src_path) {
+ auto const bytes = FileSystemManager::ReadFile(*src_path);
+ if (bytes.has_value() and result.ParseFromString(*bytes)) {
+ return result;
+ }
+ }
+ logger_.Emit(LogLevel::Warning,
+ "Parsing action result failed for action {}",
+ action_id.hash());
+ return std::nullopt;
+ }
+
+ private:
+ // The action cache stores the results of failed actions. For those to be
+ // overwritable by subsequent runs we need to choose the store mode "last
+ // wins" for the underlying file storage.
+ static constexpr auto kStoreMode = StoreMode::LastWins;
+
+ Logger logger_{"LocalAC"};
+ gsl::not_null<LocalCAS<ObjectType::File>*> cas_;
+ std::filesystem::path const cache_root_{
+ LocalExecutionConfig::GetCacheDir()};
+ FileStorage<ObjectType::File, kStoreMode> file_store_{cache_root_ / "ac"};
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_AC_HPP
diff --git a/src/buildtool/execution_api/local/local_action.cpp b/src/buildtool/execution_api/local/local_action.cpp
new file mode 100644
index 00000000..eac6ede8
--- /dev/null
+++ b/src/buildtool/execution_api/local/local_action.cpp
@@ -0,0 +1,295 @@
+#include "src/buildtool/execution_api/local/local_action.hpp"
+
+#include <algorithm>
+#include <filesystem>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/common/bazel_types.hpp"
+#include "src/buildtool/execution_api/local/local_response.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "src/buildtool/file_system/object_type.hpp"
+#include "src/buildtool/file_system/system_command.hpp"
+
+namespace {
+
+/// \brief Removes specified directory if KeepBuildDir() is not set.
+class BuildCleanupAnchor {
+ public:
+ explicit BuildCleanupAnchor(std::filesystem::path build_path) noexcept
+ : build_path{std::move(build_path)} {}
+ BuildCleanupAnchor(BuildCleanupAnchor const&) = delete;
+ BuildCleanupAnchor(BuildCleanupAnchor&&) = delete;
+ auto operator=(BuildCleanupAnchor const&) -> BuildCleanupAnchor& = delete;
+ auto operator=(BuildCleanupAnchor &&) -> BuildCleanupAnchor& = delete;
+ ~BuildCleanupAnchor() {
+ if (not LocalExecutionConfig::KeepBuildDir() and
+ not FileSystemManager::RemoveDirectory(build_path, true)) {
+ Logger::Log(LogLevel::Error,
+ "Could not cleanup build directory {}",
+ build_path.string());
+ }
+ }
+
+ private:
+ std::filesystem::path const build_path{};
+};
+
+} // namespace
+
+auto LocalAction::Execute(Logger const* logger) noexcept
+ -> IExecutionResponse::Ptr {
+ auto do_cache = CacheEnabled(cache_flag_);
+ auto action = CreateActionDigest(root_digest_, not do_cache);
+
+ if (logger != nullptr) {
+ logger->Emit(LogLevel::Trace,
+ "start execution\n"
+ " - exec_dir digest: {}\n"
+ " - action digest: {}",
+ root_digest_.hash(),
+ action.hash());
+ }
+
+ if (do_cache) {
+ if (auto result = storage_->CachedActionResult(action)) {
+ if (result->exit_code() == 0) {
+ return IExecutionResponse::Ptr{
+ new LocalResponse{action.hash(),
+ {std::move(*result), /*is_cached=*/true},
+ storage_}};
+ }
+ }
+ }
+
+ if (ExecutionEnabled(cache_flag_)) {
+ if (auto output = Run(action)) {
+ if (cache_flag_ == CacheFlag::PretendCached) {
+ // ensure the same id is created as if caching were enabled
+ auto action_id = CreateActionDigest(root_digest_, false).hash();
+ output->is_cached = true;
+ return IExecutionResponse::Ptr{new LocalResponse{
+ std::move(action_id), std::move(*output), storage_}};
+ }
+ return IExecutionResponse::Ptr{
+ new LocalResponse{action.hash(), std::move(*output), storage_}};
+ }
+ }
+
+ return nullptr;
+}
+
+auto LocalAction::Run(bazel_re::Digest const& action_id) const noexcept
+ -> std::optional<Output> {
+ auto exec_path = CreateUniquePath(LocalExecutionConfig::GetBuildDir() /
+ "exec_root" / action_id.hash());
+
+ if (not exec_path) {
+ return std::nullopt;
+ }
+
+ // anchor for cleaning up build directory at end of function (using RAII)
+ auto anchor = BuildCleanupAnchor(*exec_path);
+
+ auto const build_root = *exec_path / "build_root";
+ if (not CreateDirectoryStructure(build_root)) {
+ return std::nullopt;
+ }
+
+ if (cmdline_.empty()) {
+ logger_.Emit(LogLevel::Error, "malformed command line");
+ return std::nullopt;
+ }
+
+ auto cmdline = LocalExecutionConfig::GetLauncher();
+ std::copy(cmdline_.begin(), cmdline_.end(), std::back_inserter(cmdline));
+
+ SystemCommand system{"LocalExecution"};
+ auto const command_output =
+ system.Execute(cmdline, env_vars_, build_root, *exec_path);
+ if (command_output.has_value()) {
+ Output result{};
+ result.action.set_exit_code(command_output->return_value);
+ if (gsl::owner<bazel_re::Digest*> digest_ptr =
+ DigestFromFile(command_output->stdout_file)) {
+ result.action.set_allocated_stdout_digest(digest_ptr);
+ }
+ if (gsl::owner<bazel_re::Digest*> digest_ptr =
+ DigestFromFile(command_output->stderr_file)) {
+ result.action.set_allocated_stderr_digest(digest_ptr);
+ }
+
+ if (CollectAndStoreOutputs(&result.action, build_root)) {
+ if (cache_flag_ == CacheFlag::CacheOutput) {
+ if (not storage_->StoreActionResult(action_id, result.action)) {
+ logger_.Emit(LogLevel::Warning,
+ "failed to store action results");
+ }
+ }
+ }
+ return result;
+ }
+
+ logger_.Emit(LogLevel::Error, "failed to execute commands");
+
+ return std::nullopt;
+}
+
+auto LocalAction::StageFile(std::filesystem::path const& target_path,
+ Artifact::ObjectInfo const& info) const -> bool {
+ auto blob_path =
+ storage_->BlobPath(info.digest, IsExecutableObject(info.type));
+
+ return blob_path and
+ FileSystemManager::CreateDirectory(target_path.parent_path()) and
+ FileSystemManager::CreateFileHardlink(*blob_path, target_path);
+}
+
+auto LocalAction::StageInputFiles(
+ std::filesystem::path const& exec_path) const noexcept -> bool {
+ if (FileSystemManager::IsRelativePath(exec_path)) {
+ return false;
+ }
+
+ auto infos = storage_->ReadTreeInfos(root_digest_, exec_path);
+ if (not infos) {
+ return false;
+ }
+ for (std::size_t i{}; i < infos->first.size(); ++i) {
+ if (not StageFile(infos->first.at(i), infos->second.at(i))) {
+ return false;
+ }
+ }
+ return true;
+}
+
+auto LocalAction::CreateDirectoryStructure(
+ std::filesystem::path const& exec_path) const noexcept -> bool {
+ // clean execution directory
+ if (not FileSystemManager::RemoveDirectory(exec_path, true)) {
+ logger_.Emit(LogLevel::Error, "failed to clean exec_path");
+ return false;
+ }
+
+ // create process-exclusive execution directory
+ if (not FileSystemManager::CreateDirectoryExclusive(exec_path)) {
+ logger_.Emit(LogLevel::Error, "failed to exclusively create exec_path");
+ return false;
+ }
+
+ // stage input files to execution directory
+ if (not StageInputFiles(exec_path)) {
+ logger_.Emit(LogLevel::Error,
+ "failed to stage input files to exec_path");
+ return false;
+ }
+
+ // create output paths
+ for (auto const& local_path : output_files_) {
+ if (not FileSystemManager::CreateDirectory(
+ (exec_path / local_path).parent_path())) {
+ logger_.Emit(LogLevel::Error, "failed to create output directory");
+ return false;
+ }
+ }
+
+ return true;
+}
+
+auto LocalAction::CollectOutputFile(std::filesystem::path const& exec_path,
+ std::string const& local_path)
+ const noexcept -> std::optional<bazel_re::OutputFile> {
+ auto file_path = exec_path / local_path;
+ auto type = FileSystemManager::Type(file_path);
+ if (not type or not IsFileObject(*type)) {
+ Logger::Log(LogLevel::Error, "expected file at {}", local_path);
+ return std::nullopt;
+ }
+ bool is_executable = IsExecutableObject(*type);
+ auto digest = storage_->StoreBlob(file_path, is_executable);
+ if (digest) {
+ auto out_file = bazel_re::OutputFile{};
+ out_file.set_path(local_path);
+ out_file.set_allocated_digest(
+ gsl::owner<bazel_re::Digest*>{new bazel_re::Digest{*digest}});
+ out_file.set_is_executable(is_executable);
+ return out_file;
+ }
+ return std::nullopt;
+}
+
+auto LocalAction::CollectOutputDir(std::filesystem::path const& exec_path,
+ std::string const& local_path) const noexcept
+ -> std::optional<bazel_re::OutputDirectory> {
+ auto dir_path = exec_path / local_path;
+ auto type = FileSystemManager::Type(dir_path);
+ if (not type or not IsTreeObject(*type)) {
+ Logger::Log(LogLevel::Error, "expected directory at {}", local_path);
+ return std::nullopt;
+ }
+ auto digest = BazelMsgFactory::CreateDirectoryDigestFromLocalTree(
+ dir_path,
+ [this](auto path, auto is_exec) {
+ return storage_->StoreBlob(path, is_exec);
+ },
+ [this](auto bytes, auto dir) -> std::optional<bazel_re::Digest> {
+ auto digest = storage_->StoreBlob(bytes);
+ if (digest and not tree_map_->HasTree(*digest)) {
+ auto tree = tree_map_->CreateTree();
+ if (not BazelMsgFactory::ReadObjectInfosFromDirectory(
+ dir,
+ [&tree](auto path, auto info) {
+ return tree.AddInfo(path, info);
+ }) or
+ not tree_map_->AddTree(*digest, std::move(tree))) {
+ return std::nullopt;
+ }
+ }
+ return digest;
+ });
+ if (digest) {
+ auto out_dir = bazel_re::OutputDirectory{};
+ out_dir.set_path(local_path);
+ out_dir.set_allocated_tree_digest(
+ gsl::owner<bazel_re::Digest*>{new bazel_re::Digest{*digest}});
+ return out_dir;
+ }
+ return std::nullopt;
+}
+
+auto LocalAction::CollectAndStoreOutputs(
+ bazel_re::ActionResult* result,
+ std::filesystem::path const& exec_path) const noexcept -> bool {
+ logger_.Emit(LogLevel::Trace, "collecting outputs:");
+ for (auto const& path : output_files_) {
+ auto out_file = CollectOutputFile(exec_path, path);
+ if (not out_file) {
+ logger_.Emit(
+ LogLevel::Error, "could not collect output file {}", path);
+ return false;
+ }
+ auto const& digest = out_file->digest().hash();
+ logger_.Emit(LogLevel::Trace, " - file {}: {}", path, digest);
+ result->mutable_output_files()->Add(std::move(*out_file));
+ }
+ for (auto const& path : output_dirs_) {
+ auto out_dir = CollectOutputDir(exec_path, path);
+ if (not out_dir) {
+ logger_.Emit(
+ LogLevel::Error, "could not collect output dir {}", path);
+ return false;
+ }
+ auto const& digest = out_dir->tree_digest().hash();
+ logger_.Emit(LogLevel::Trace, " - dir {}: {}", path, digest);
+ result->mutable_output_directories()->Add(std::move(*out_dir));
+ }
+
+ return true;
+}
+
+auto LocalAction::DigestFromFile(std::filesystem::path const& file_path)
+ const noexcept -> gsl::owner<bazel_re::Digest*> {
+ if (auto digest = storage_->StoreBlob(file_path)) {
+ return new bazel_re::Digest{std::move(*digest)};
+ }
+ return nullptr;
+}
diff --git a/src/buildtool/execution_api/local/local_action.hpp b/src/buildtool/execution_api/local/local_action.hpp
new file mode 100644
index 00000000..3bf49fd2
--- /dev/null
+++ b/src/buildtool/execution_api/local/local_action.hpp
@@ -0,0 +1,122 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_ACTION_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_ACTION_HPP
+
+#include <chrono>
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp"
+#include "src/buildtool/execution_api/common/execution_action.hpp"
+#include "src/buildtool/execution_api/common/execution_response.hpp"
+#include "src/buildtool/execution_api/local/config.hpp"
+#include "src/buildtool/execution_api/local/local_storage.hpp"
+
+class LocalApi;
+
+/// \brief Action for local execution.
+class LocalAction final : public IExecutionAction {
+ friend class LocalApi;
+
+ public:
+ struct Output {
+ bazel_re::ActionResult action{};
+ bool is_cached{};
+ };
+
+ auto Execute(Logger const* logger) noexcept
+ -> IExecutionResponse::Ptr final;
+
+ void SetCacheFlag(CacheFlag flag) noexcept final { cache_flag_ = flag; }
+
+ void SetTimeout(std::chrono::milliseconds timeout) noexcept final {
+ timeout_ = timeout;
+ }
+
+ private:
+ Logger logger_{"LocalExecution"};
+ std::shared_ptr<LocalStorage> storage_;
+ std::shared_ptr<LocalTreeMap> tree_map_;
+ ArtifactDigest root_digest_{};
+ std::vector<std::string> cmdline_{};
+ std::vector<std::string> output_files_{};
+ std::vector<std::string> output_dirs_{};
+ std::map<std::string, std::string> env_vars_{};
+ std::vector<bazel_re::Platform_Property> properties_;
+ std::chrono::milliseconds timeout_{kDefaultTimeout};
+ CacheFlag cache_flag_{CacheFlag::CacheOutput};
+
+ LocalAction(std::shared_ptr<LocalStorage> storage,
+ std::shared_ptr<LocalTreeMap> tree_map,
+ ArtifactDigest root_digest,
+ std::vector<std::string> command,
+ std::vector<std::string> output_files,
+ std::vector<std::string> output_dirs,
+ std::map<std::string, std::string> env_vars,
+ std::map<std::string, std::string> const& properties) noexcept
+ : storage_{std::move(storage)},
+ tree_map_{std::move(tree_map)},
+ root_digest_{std::move(root_digest)},
+ cmdline_{std::move(command)},
+ output_files_{std::move(output_files)},
+ output_dirs_{std::move(output_dirs)},
+ env_vars_{std::move(env_vars)},
+ properties_{BazelMsgFactory::CreateMessageVectorFromMap<
+ bazel_re::Platform_Property>(properties)} {
+ std::sort(output_files_.begin(), output_files_.end());
+ std::sort(output_dirs_.begin(), output_dirs_.end());
+ }
+
+ [[nodiscard]] auto CreateActionDigest(bazel_re::Digest const& exec_dir,
+ bool do_not_cache)
+ -> bazel_re::Digest {
+ return BazelMsgFactory::CreateActionDigestFromCommandLine(
+ cmdline_,
+ exec_dir,
+ output_files_,
+ output_dirs_,
+ {} /*FIXME output node properties*/,
+ BazelMsgFactory::CreateMessageVectorFromMap<
+ bazel_re::Command_EnvironmentVariable>(env_vars_),
+ properties_,
+ do_not_cache,
+ timeout_);
+ }
+
+ [[nodiscard]] auto Run(bazel_re::Digest const& action_id) const noexcept
+ -> std::optional<Output>;
+
+ [[nodiscard]] auto StageFile(std::filesystem::path const& target_path,
+ Artifact::ObjectInfo const& info) const
+ -> bool;
+
+ /// \brief Stage input artifacts to the execution directory.
+ /// Stage artifacts and their parent directory structure from CAS to the
+ /// specified execution directory. The execution directory may no exist.
+ /// \param[in] exec_path Absolute path to the execution directory.
+ /// \returns Success indicator.
+ [[nodiscard]] auto StageInputFiles(
+ std::filesystem::path const& exec_path) const noexcept -> bool;
+
+ [[nodiscard]] auto CreateDirectoryStructure(
+ std::filesystem::path const& exec_path) const noexcept -> bool;
+
+ [[nodiscard]] auto CollectOutputFile(std::filesystem::path const& exec_path,
+ std::string const& local_path)
+ const noexcept -> std::optional<bazel_re::OutputFile>;
+
+ [[nodiscard]] auto CollectOutputDir(std::filesystem::path const& exec_path,
+ std::string const& local_path)
+ const noexcept -> std::optional<bazel_re::OutputDirectory>;
+
+ [[nodiscard]] auto CollectAndStoreOutputs(
+ bazel_re::ActionResult* result,
+ std::filesystem::path const& exec_path) const noexcept -> bool;
+
+ /// \brief Store file from path in file CAS and return pointer to digest.
+ [[nodiscard]] auto DigestFromFile(std::filesystem::path const& file_path)
+ const noexcept -> gsl::owner<bazel_re::Digest*>;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_ACTION_HPP
diff --git a/src/buildtool/execution_api/local/local_api.hpp b/src/buildtool/execution_api/local/local_api.hpp
new file mode 100644
index 00000000..96b96416
--- /dev/null
+++ b/src/buildtool/execution_api/local/local_api.hpp
@@ -0,0 +1,157 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_API_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_API_HPP
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_blob.hpp"
+#include "src/buildtool/execution_api/common/execution_api.hpp"
+#include "src/buildtool/execution_api/common/local_tree_map.hpp"
+#include "src/buildtool/execution_api/local/local_action.hpp"
+#include "src/buildtool/execution_api/local/local_storage.hpp"
+
+/// \brief API for local execution.
+class LocalApi final : public IExecutionApi {
+ public:
+ auto CreateAction(
+ ArtifactDigest const& root_digest,
+ std::vector<std::string> const& command,
+ std::vector<std::string> const& output_files,
+ std::vector<std::string> const& output_dirs,
+ std::map<std::string, std::string> const& env_vars,
+ std::map<std::string, std::string> const& properties) noexcept
+ -> IExecutionAction::Ptr final {
+ return IExecutionAction::Ptr{new LocalAction{storage_,
+ tree_map_,
+ root_digest,
+ command,
+ output_files,
+ output_dirs,
+ env_vars,
+ properties}};
+ }
+
+ [[nodiscard]] auto RetrieveToPaths(
+ std::vector<Artifact::ObjectInfo> const& artifacts_info,
+ std::vector<std::filesystem::path> const& output_paths) noexcept
+ -> bool final {
+ if (artifacts_info.size() != output_paths.size()) {
+ Logger::Log(LogLevel::Error,
+ "different number of digests and output paths.");
+ return false;
+ }
+
+ for (std::size_t i{}; i < artifacts_info.size(); ++i) {
+ auto const& info = artifacts_info[i];
+ if (IsTreeObject(info.type)) {
+ // read object infos from sub tree and call retrieve recursively
+ auto const infos =
+ storage_->ReadTreeInfos(info.digest, output_paths[i]);
+ if (not infos or
+ not RetrieveToPaths(infos->second, infos->first)) {
+ return false;
+ }
+ }
+ else {
+ auto const blob_path = storage_->BlobPath(
+ info.digest, IsExecutableObject(info.type));
+ if (not blob_path or
+ not FileSystemManager::CreateDirectory(
+ output_paths[i].parent_path()) or
+ not FileSystemManager::CopyFileAs(
+ *blob_path, output_paths[i], info.type)) {
+ return false;
+ }
+ }
+ }
+ return true;
+ }
+
+ [[nodiscard]] auto RetrieveToFds(
+ std::vector<Artifact::ObjectInfo> const& artifacts_info,
+ std::vector<int> const& fds) noexcept -> bool final {
+ if (artifacts_info.size() != fds.size()) {
+ Logger::Log(LogLevel::Error,
+ "different number of digests and file descriptors.");
+ return false;
+ }
+
+ for (std::size_t i{}; i < artifacts_info.size(); ++i) {
+ auto fd = fds[i];
+ auto const& info = artifacts_info[i];
+
+ if (gsl::owner<FILE*> out = fdopen(fd, "wb")) { // NOLINT
+ auto const success = storage_->DumpToStream(info, out);
+ std::fclose(out);
+ if (not success) {
+ Logger::Log(LogLevel::Error,
+ "dumping {} {} to file descriptor {} failed.",
+ IsTreeObject(info.type) ? "tree" : "blob",
+ info.ToString(),
+ fd);
+ return false;
+ }
+ }
+ else {
+ Logger::Log(LogLevel::Error,
+ "dumping to file descriptor {} failed.",
+ fd);
+ return false;
+ }
+ }
+ return true;
+ }
+
+ [[nodiscard]] auto Upload(BlobContainer const& blobs,
+ bool /*skip_find_missing*/) noexcept
+ -> bool final {
+ for (auto const& blob : blobs) {
+ auto cas_digest = storage_->StoreBlob(blob.data);
+ if (not cas_digest or not std::equal_to<bazel_re::Digest>{}(
+ *cas_digest, blob.digest)) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ [[nodiscard]] auto UploadTree(
+ std::vector<DependencyGraph::NamedArtifactNodePtr> const&
+ artifacts) noexcept -> std::optional<ArtifactDigest> final {
+ BlobContainer blobs{};
+ auto tree = tree_map_->CreateTree();
+ auto digest = BazelMsgFactory::CreateDirectoryDigestFromTree(
+ artifacts,
+ [&blobs](BazelBlob&& blob) { blobs.Emplace(std::move(blob)); },
+ [&tree](auto path, auto info) { return tree.AddInfo(path, info); });
+ if (not digest) {
+ Logger::Log(LogLevel::Debug, "failed to create digest for tree.");
+ return std::nullopt;
+ }
+
+ if (not Upload(blobs, /*skip_find_missing=*/false)) {
+ Logger::Log(LogLevel::Debug, "failed to upload blobs for tree.");
+ return std::nullopt;
+ }
+
+ if (tree_map_->AddTree(*digest, std::move(tree))) {
+ return ArtifactDigest{std::move(*digest)};
+ }
+ return std::nullopt;
+ }
+
+ [[nodiscard]] auto IsAvailable(ArtifactDigest const& digest) const noexcept
+ -> bool final {
+ return storage_->BlobPath(digest, false).has_value();
+ }
+
+ private:
+ std::shared_ptr<LocalTreeMap> tree_map_{std::make_shared<LocalTreeMap>()};
+ std::shared_ptr<LocalStorage> storage_{
+ std::make_shared<LocalStorage>(tree_map_)};
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_API_HPP
diff --git a/src/buildtool/execution_api/local/local_cas.hpp b/src/buildtool/execution_api/local/local_cas.hpp
new file mode 100644
index 00000000..4a28e796
--- /dev/null
+++ b/src/buildtool/execution_api/local/local_cas.hpp
@@ -0,0 +1,103 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_CAS_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_CAS_HPP
+
+#include <sstream>
+#include <thread>
+
+#include "src/buildtool/common/artifact.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_blob.hpp"
+#include "src/buildtool/execution_api/common/execution_common.hpp"
+#include "src/buildtool/execution_api/local/config.hpp"
+#include "src/buildtool/execution_api/local/file_storage.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+template <ObjectType kType = ObjectType::File>
+class LocalCAS {
+ public:
+ LocalCAS() noexcept = default;
+
+ explicit LocalCAS(std::filesystem::path cache_root) noexcept
+ : cache_root_{std::move(cache_root)} {}
+
+ LocalCAS(LocalCAS const&) = delete;
+ LocalCAS(LocalCAS&&) = delete;
+ auto operator=(LocalCAS const&) -> LocalCAS& = delete;
+ auto operator=(LocalCAS &&) -> LocalCAS& = delete;
+ ~LocalCAS() noexcept = default;
+
+ [[nodiscard]] auto StoreBlobFromBytes(std::string const& bytes)
+ const noexcept -> std::optional<bazel_re::Digest> {
+ return StoreBlob(bytes);
+ }
+
+ [[nodiscard]] auto StoreBlobFromFile(std::filesystem::path const& file_path)
+ const noexcept -> std::optional<bazel_re::Digest> {
+ return StoreBlob(file_path);
+ }
+
+ [[nodiscard]] auto BlobPath(bazel_re::Digest const& digest) const noexcept
+ -> std::optional<std::filesystem::path> {
+ auto blob_path = file_store_.GetPath(digest.hash());
+ if (FileSystemManager::IsFile(blob_path)) {
+ return blob_path;
+ }
+ logger_.Emit(LogLevel::Debug, "Blob not found {}", digest.hash());
+ return std::nullopt;
+ }
+
+ private:
+ static constexpr char kSuffix = ToChar(kType);
+ Logger logger_{std::string{"LocalCAS"} + kSuffix};
+ std::filesystem::path const cache_root_{
+ LocalExecutionConfig::GetCacheDir()};
+ FileStorage<kType> file_store_{cache_root_ /
+ (std::string{"cas"} + kSuffix)};
+
+ [[nodiscard]] static auto CreateDigest(std::string const& bytes) noexcept
+ -> std::optional<bazel_re::Digest> {
+ return ArtifactDigest::Create(bytes);
+ }
+
+ [[nodiscard]] static auto CreateDigest(
+ std::filesystem::path const& file_path) noexcept
+ -> std::optional<bazel_re::Digest> {
+ auto const bytes = FileSystemManager::ReadFile(file_path);
+ if (bytes.has_value()) {
+ return ArtifactDigest::Create(*bytes);
+ }
+ return std::nullopt;
+ }
+
+ /// \brief Store blob from bytes to storage.
+ [[nodiscard]] auto StoreBlobData(std::string const& blob_id,
+ std::string const& bytes) const noexcept
+ -> bool {
+ return file_store_.AddFromBytes(blob_id, bytes);
+ }
+
+ /// \brief Store blob from file path to storage.
+ [[nodiscard]] auto StoreBlobData(
+ std::string const& blob_id,
+ std::filesystem::path const& file_path) const noexcept -> bool {
+ return file_store_.AddFromFile(blob_id, file_path);
+ }
+
+ /// \brief Store blob from unspecified data to storage.
+ template <class T>
+ [[nodiscard]] auto StoreBlob(T const& data) const noexcept
+ -> std::optional<bazel_re::Digest> {
+ auto digest = CreateDigest(data);
+ if (digest) {
+ if (StoreBlobData(digest->hash(), data)) {
+ return digest;
+ }
+ logger_.Emit(
+ LogLevel::Debug, "Failed to store blob {}.", digest->hash());
+ }
+ logger_.Emit(LogLevel::Debug, "Failed to create digest.");
+ return std::nullopt;
+ }
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_CAS_HPP
diff --git a/src/buildtool/execution_api/local/local_response.hpp b/src/buildtool/execution_api/local/local_response.hpp
new file mode 100644
index 00000000..9084be0b
--- /dev/null
+++ b/src/buildtool/execution_api/local/local_response.hpp
@@ -0,0 +1,101 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_RESPONSE_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_RESPONSE_HPP
+
+#include "src/buildtool/execution_api/common/execution_response.hpp"
+#include "src/buildtool/execution_api/local/local_action.hpp"
+#include "src/buildtool/execution_api/local/local_storage.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+
+/// \brief Response of a LocalAction.
+class LocalResponse final : public IExecutionResponse {
+ friend class LocalAction;
+
+ public:
+ auto Status() const noexcept -> StatusCode final {
+ return StatusCode::Success; // unused
+ }
+ auto HasStdErr() const noexcept -> bool final {
+ return (output_.action.stderr_digest().size_bytes() != 0);
+ }
+ auto HasStdOut() const noexcept -> bool final {
+ return (output_.action.stdout_digest().size_bytes() != 0);
+ }
+ auto StdErr() noexcept -> std::string final {
+ if (auto path = storage_->BlobPath(output_.action.stderr_digest(),
+ /*is_executable=*/false)) {
+ if (auto content = FileSystemManager::ReadFile(*path)) {
+ return std::move(*content);
+ }
+ }
+ Logger::Log(LogLevel::Debug, "reading stderr failed");
+ return {};
+ }
+ auto StdOut() noexcept -> std::string final {
+ if (auto path = storage_->BlobPath(output_.action.stdout_digest(),
+ /*is_executable=*/false)) {
+ if (auto content = FileSystemManager::ReadFile(*path)) {
+ return std::move(*content);
+ }
+ }
+ Logger::Log(LogLevel::Debug, "reading stdout failed");
+ return {};
+ }
+ auto ExitCode() const noexcept -> int final {
+ return output_.action.exit_code();
+ }
+ auto IsCached() const noexcept -> bool final { return output_.is_cached; };
+
+ auto ActionDigest() const noexcept -> std::string final {
+ return action_id_;
+ }
+
+ auto Artifacts() const noexcept -> ArtifactInfos final {
+ ArtifactInfos artifacts{};
+ auto const& action_result = output_.action;
+ artifacts.reserve(
+ static_cast<std::size_t>(action_result.output_files().size()));
+
+ // collect files and store them
+ for (auto const& file : action_result.output_files()) {
+ try {
+ artifacts.emplace(
+ file.path(),
+ Artifact::ObjectInfo{ArtifactDigest{file.digest()},
+ file.is_executable()
+ ? ObjectType::Executable
+ : ObjectType::File});
+ } catch (...) {
+ return {};
+ }
+ }
+
+ // collect directories and store them
+ for (auto const& dir : action_result.output_directories()) {
+ try {
+ artifacts.emplace(
+ dir.path(),
+ Artifact::ObjectInfo{ArtifactDigest{dir.tree_digest()},
+ ObjectType::Tree});
+ } catch (...) {
+ return {};
+ }
+ }
+
+ return artifacts;
+ };
+
+ private:
+ std::string action_id_{};
+ LocalAction::Output output_{};
+ gsl::not_null<std::shared_ptr<LocalStorage>> storage_;
+
+ explicit LocalResponse(
+ std::string action_id,
+ LocalAction::Output output,
+ gsl::not_null<std::shared_ptr<LocalStorage>> storage) noexcept
+ : action_id_{std::move(action_id)},
+ output_{std::move(output)},
+ storage_{std::move(storage)} {}
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_RESPONSE_HPP
diff --git a/src/buildtool/execution_api/local/local_storage.cpp b/src/buildtool/execution_api/local/local_storage.cpp
new file mode 100644
index 00000000..b4fe1658
--- /dev/null
+++ b/src/buildtool/execution_api/local/local_storage.cpp
@@ -0,0 +1,125 @@
+#include "src/buildtool/execution_api/local/local_api.hpp"
+
+namespace {
+
+[[nodiscard]] auto ReadDirectory(
+ gsl::not_null<LocalStorage const*> const& storage,
+ bazel_re::Digest const& digest) noexcept
+ -> std::optional<bazel_re::Directory> {
+ if (auto const path = storage->BlobPath(digest, /*is_executable=*/false)) {
+ if (auto const content = FileSystemManager::ReadFile(*path)) {
+ return BazelMsgFactory::MessageFromString<bazel_re::Directory>(
+ *content);
+ }
+ }
+ Logger::Log(
+ LogLevel::Error, "Directory {} not found in CAS", digest.hash());
+ return std::nullopt;
+}
+
+[[nodiscard]] auto TreeToStream(
+ gsl::not_null<LocalStorage const*> const& storage,
+ bazel_re::Digest const& tree_digest,
+ gsl::not_null<FILE*> const& stream) noexcept -> bool {
+ if (auto dir = ReadDirectory(storage, tree_digest)) {
+ if (auto data = BazelMsgFactory::DirectoryToString(*dir)) {
+ std::fwrite(data->data(), 1, data->size(), stream);
+ return true;
+ }
+ }
+ return false;
+}
+
+[[nodiscard]] auto BlobToStream(
+ gsl::not_null<LocalStorage const*> const& storage,
+ Artifact::ObjectInfo const& blob_info,
+ gsl::not_null<FILE*> const& stream) noexcept -> bool {
+ constexpr std::size_t kChunkSize{512};
+ if (auto const path = storage->BlobPath(
+ blob_info.digest, IsExecutableObject(blob_info.type))) {
+ std::string data(kChunkSize, '\0');
+ if (gsl::owner<FILE*> in = std::fopen(path->c_str(), "rb")) {
+ while (auto size = std::fread(data.data(), 1, kChunkSize, in)) {
+ std::fwrite(data.data(), 1, size, stream);
+ }
+ std::fclose(in);
+ return true;
+ }
+ }
+ return false;
+}
+
+} // namespace
+
+auto LocalStorage::ReadTreeInfos(
+ bazel_re::Digest const& tree_digest,
+ std::filesystem::path const& parent) const noexcept
+ -> std::optional<std::pair<std::vector<std::filesystem::path>,
+ std::vector<Artifact::ObjectInfo>>> {
+ std::vector<std::filesystem::path> paths{};
+ std::vector<Artifact::ObjectInfo> infos{};
+
+ auto store_info = [&paths, &infos](auto path, auto info) {
+ paths.emplace_back(path);
+ infos.emplace_back(info);
+ return true;
+ };
+
+ if (ReadObjectInfosRecursively(store_info, parent, tree_digest)) {
+ return std::make_pair(std::move(paths), std::move(infos));
+ }
+ return std::nullopt;
+}
+
+auto LocalStorage::ReadObjectInfosRecursively(
+ BazelMsgFactory::InfoStoreFunc const& store_info,
+ std::filesystem::path const& parent,
+ bazel_re::Digest const& digest) const noexcept -> bool {
+ // read from in-memory tree map
+ if (tree_map_) {
+ auto const* tree = tree_map_->GetTree(digest);
+ if (tree != nullptr) {
+ for (auto const& [path, info] : *tree) {
+ try {
+ if (IsTreeObject(info->type)
+ ? not ReadObjectInfosRecursively(
+ store_info, parent / path, info->digest)
+ : not store_info(parent / path, *info)) {
+ return false;
+ }
+ } catch (...) { // satisfy clang-tidy, store_info() could throw
+ return false;
+ }
+ }
+ return true;
+ }
+ Logger::Log(
+ LogLevel::Debug, "tree {} not found in tree map", digest.hash());
+ }
+
+ // fallback read from CAS and cache it in in-memory tree map
+ if (auto dir = ReadDirectory(this, digest)) {
+ auto tree = tree_map_ ? std::make_optional(tree_map_->CreateTree())
+ : std::nullopt;
+ return BazelMsgFactory::ReadObjectInfosFromDirectory(
+ *dir,
+ [this, &store_info, &parent, &tree](auto path, auto info) {
+ return IsTreeObject(info.type)
+ ? (not tree or tree->AddInfo(path, info)) and
+ ReadObjectInfosRecursively(
+ store_info,
+ parent / path,
+ info.digest)
+ : store_info(parent / path, info);
+ }) and
+ (not tree_map_ or tree_map_->AddTree(digest, std::move(*tree)));
+ }
+ return false;
+}
+
+auto LocalStorage::DumpToStream(
+ Artifact::ObjectInfo const& info,
+ gsl::not_null<FILE*> const& stream) const noexcept -> bool {
+ return IsTreeObject(info.type) ? TreeToStream(this, info.digest, stream)
+ : BlobToStream(this, info, stream);
+}
diff --git a/src/buildtool/execution_api/local/local_storage.hpp b/src/buildtool/execution_api/local/local_storage.hpp
new file mode 100644
index 00000000..0b90cf63
--- /dev/null
+++ b/src/buildtool/execution_api/local/local_storage.hpp
@@ -0,0 +1,109 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_STORAGE_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_STORAGE_HPP
+
+#include <optional>
+
+#include "src/buildtool/common/artifact.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp"
+#include "src/buildtool/execution_api/common/execution_common.hpp"
+#include "src/buildtool/execution_api/common/local_tree_map.hpp"
+#include "src/buildtool/execution_api/local/local_ac.hpp"
+#include "src/buildtool/execution_api/local/local_cas.hpp"
+
+class LocalStorage {
+ public:
+ explicit LocalStorage(
+ std::shared_ptr<LocalTreeMap> tree_map = nullptr) noexcept
+ : tree_map_{std::move(tree_map)} {}
+
+ explicit LocalStorage(
+ std::filesystem::path const& cache_root,
+ std::shared_ptr<LocalTreeMap> tree_map = nullptr) noexcept
+ : cas_file_{cache_root},
+ cas_exec_{cache_root},
+ ac_{&cas_file_, cache_root},
+ tree_map_{std::move(tree_map)} {}
+
+ /// \brief Store blob from file path with x-bit determined from file system.
+ [[nodiscard]] auto StoreBlob(std::filesystem::path const& file_path)
+ const noexcept -> std::optional<bazel_re::Digest> {
+ return StoreBlob(file_path, FileSystemManager::IsExecutable(file_path));
+ }
+
+ /// \brief Store blob from file path with x-bit.
+ [[nodiscard]] auto StoreBlob(std::filesystem::path const& file_path,
+ bool is_executable) const noexcept
+ -> std::optional<bazel_re::Digest> {
+ if (is_executable) {
+ return cas_exec_.StoreBlobFromFile(file_path);
+ }
+ return cas_file_.StoreBlobFromFile(file_path);
+ }
+
+ /// \brief Store blob from bytes with x-bit (default: non-executable).
+ [[nodiscard]] auto StoreBlob(std::string const& bytes,
+ bool is_executable = false) const noexcept
+ -> std::optional<bazel_re::Digest> {
+ return is_executable ? cas_exec_.StoreBlobFromBytes(bytes)
+ : cas_file_.StoreBlobFromBytes(bytes);
+ }
+
+ /// \brief Obtain blob path from digest with x-bit.
+ [[nodiscard]] auto BlobPath(bazel_re::Digest const& digest,
+ bool is_executable) const noexcept
+ -> std::optional<std::filesystem::path> {
+ auto const path = is_executable ? cas_exec_.BlobPath(digest)
+ : cas_file_.BlobPath(digest);
+ return path ? path : TrySyncBlob(digest, is_executable);
+ }
+
+ [[nodiscard]] auto StoreActionResult(
+ bazel_re::Digest const& action_id,
+ bazel_re::ActionResult const& result) const noexcept -> bool {
+ return ac_.StoreResult(action_id, result);
+ }
+
+ [[nodiscard]] auto CachedActionResult(bazel_re::Digest const& action_id)
+ const noexcept -> std::optional<bazel_re::ActionResult> {
+ return ac_.CachedResult(action_id);
+ }
+
+ [[nodiscard]] auto ReadTreeInfos(
+ bazel_re::Digest const& tree_digest,
+ std::filesystem::path const& parent) const noexcept
+ -> std::optional<std::pair<std::vector<std::filesystem::path>,
+ std::vector<Artifact::ObjectInfo>>>;
+
+ [[nodiscard]] auto DumpToStream(
+ Artifact::ObjectInfo const& info,
+ gsl::not_null<FILE*> const& stream) const noexcept -> bool;
+
+ private:
+ LocalCAS<ObjectType::File> cas_file_{};
+ LocalCAS<ObjectType::Executable> cas_exec_{};
+ LocalAC ac_{&cas_file_};
+ std::shared_ptr<LocalTreeMap> tree_map_;
+
+ /// \brief Try to sync blob between file CAS and executable CAS.
+ /// \param digest Blob digest.
+ /// \param to_executable Sync direction.
+ /// \returns Path to blob in target CAS.
+ [[nodiscard]] auto TrySyncBlob(bazel_re::Digest const& digest,
+ bool to_executable) const noexcept
+ -> std::optional<std::filesystem::path> {
+ std::optional<std::filesystem::path> const src_blob{
+ to_executable ? cas_file_.BlobPath(digest)
+ : cas_exec_.BlobPath(digest)};
+ if (src_blob and StoreBlob(*src_blob, to_executable)) {
+ return BlobPath(digest, to_executable);
+ }
+ return std::nullopt;
+ }
+
+ [[nodiscard]] auto ReadObjectInfosRecursively(
+ BazelMsgFactory::InfoStoreFunc const& store_info,
+ std::filesystem::path const& parent,
+ bazel_re::Digest const& digest) const noexcept -> bool;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_LOCAL_LOCAL_STORAGE_HPP
diff --git a/src/buildtool/execution_api/remote/TARGETS b/src/buildtool/execution_api/remote/TARGETS
new file mode 100644
index 00000000..6f9fc004
--- /dev/null
+++ b/src/buildtool/execution_api/remote/TARGETS
@@ -0,0 +1,59 @@
+{ "bazel_network":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["bazel_network"]
+ , "hdrs":
+ [ "bazel/bytestream_client.hpp"
+ , "bazel/bazel_client_common.hpp"
+ , "bazel/bazel_action.hpp"
+ , "bazel/bazel_response.hpp"
+ , "bazel/bazel_network.hpp"
+ , "bazel/bazel_ac_client.hpp"
+ , "bazel/bazel_cas_client.hpp"
+ , "bazel/bazel_execution_client.hpp"
+ ]
+ , "srcs":
+ [ "bazel/bazel_action.cpp"
+ , "bazel/bazel_response.cpp"
+ , "bazel/bazel_network.cpp"
+ , "bazel/bazel_ac_client.cpp"
+ , "bazel/bazel_cas_client.cpp"
+ , "bazel/bazel_execution_client.cpp"
+ ]
+ , "deps":
+ [ ["src/buildtool/common", "common"]
+ , ["src/buildtool/logging", "logging"]
+ , ["src/buildtool/file_system", "file_system_manager"]
+ , ["src/buildtool/file_system", "object_type"]
+ , ["src/buildtool/execution_api/common", "common"]
+ , ["src/buildtool/execution_api/bazel_msg", "bazel_msg_factory"]
+ , ["@", "grpc", "", "grpc++"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ ]
+ , "proto":
+ [ ["@", "bazel_remote_apis", "", "remote_execution_proto"]
+ , ["@", "googleapis", "", "google_bytestream_proto"]
+ ]
+ , "stage": ["src", "buildtool", "execution_api", "remote"]
+ }
+, "bazel":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["bazel"]
+ , "hdrs": ["bazel/bazel_api.hpp"]
+ , "srcs": ["bazel/bazel_api.cpp"]
+ , "deps":
+ [ "bazel_network"
+ , ["src/buildtool/execution_api/common", "common"]
+ , ["src/buildtool/execution_api/bazel_msg", "bazel_msg"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ ]
+ , "stage": ["src", "buildtool", "execution_api", "remote"]
+ }
+, "config":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["config"]
+ , "hdrs": ["config.hpp"]
+ , "deps":
+ [["src/buildtool/logging", "logging"], ["@", "gsl-lite", "", "gsl-lite"]]
+ , "stage": ["src", "buildtool", "execution_api", "remote"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_ac_client.cpp b/src/buildtool/execution_api/remote/bazel/bazel_ac_client.cpp
new file mode 100644
index 00000000..d4c5095d
--- /dev/null
+++ b/src/buildtool/execution_api/remote/bazel/bazel_ac_client.cpp
@@ -0,0 +1,75 @@
+#include "src/buildtool/execution_api/remote/bazel/bazel_ac_client.hpp"
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/common/bazel_types.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_client_common.hpp"
+
+BazelAcClient::BazelAcClient(std::string const& server,
+ Port port,
+ std::string const& user,
+ std::string const& pwd) noexcept {
+ stub_ = bazel_re::ActionCache::NewStub(
+ CreateChannelWithCredentials(server, port, user, pwd));
+}
+
+auto BazelAcClient::GetActionResult(
+ std::string const& instance_name,
+ bazel_re::Digest const& action_digest,
+ bool inline_stdout,
+ bool inline_stderr,
+ std::vector<std::string> const& inline_output_files) noexcept
+ -> std::optional<bazel_re::ActionResult> {
+ bazel_re::GetActionResultRequest request{};
+ request.set_instance_name(instance_name);
+ request.set_allocated_action_digest(
+ gsl::owner<bazel_re::Digest*>{new bazel_re::Digest{action_digest}});
+ request.set_inline_stdout(inline_stdout);
+ request.set_inline_stderr(inline_stderr);
+ std::copy(inline_output_files.begin(),
+ inline_output_files.end(),
+ pb::back_inserter(request.mutable_inline_output_files()));
+
+ grpc::ClientContext context;
+ bazel_re::ActionResult response;
+ grpc::Status status = stub_->GetActionResult(&context, request, &response);
+
+ if (not status.ok()) {
+ if (status.error_code() == grpc::StatusCode::NOT_FOUND) {
+ logger_.Emit(
+ LogLevel::Debug, "cache miss '{}'", status.error_message());
+ }
+ else {
+ LogStatus(&logger_, LogLevel::Debug, status);
+ }
+ return std::nullopt;
+ }
+ return response;
+}
+
+auto BazelAcClient::UpdateActionResult(std::string const& instance_name,
+ bazel_re::Digest const& action_digest,
+ bazel_re::ActionResult const& result,
+ int priority) noexcept
+ -> std::optional<bazel_re::ActionResult> {
+ auto policy = std::make_unique<bazel_re::ResultsCachePolicy>();
+ policy->set_priority(priority);
+
+ bazel_re::UpdateActionResultRequest request{};
+ request.set_instance_name(instance_name);
+ request.set_allocated_action_digest(
+ gsl::owner<bazel_re::Digest*>{new bazel_re::Digest{action_digest}});
+ request.set_allocated_action_result(gsl::owner<bazel_re::ActionResult*>{
+ new bazel_re::ActionResult{result}});
+ request.set_allocated_results_cache_policy(policy.release());
+
+ grpc::ClientContext context;
+ bazel_re::ActionResult response;
+ grpc::Status status =
+ stub_->UpdateActionResult(&context, request, &response);
+
+ if (not status.ok()) {
+ LogStatus(&logger_, LogLevel::Debug, status);
+ return std::nullopt;
+ }
+ return response;
+}
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_ac_client.hpp b/src/buildtool/execution_api/remote/bazel/bazel_ac_client.hpp
new file mode 100644
index 00000000..b9514d8a
--- /dev/null
+++ b/src/buildtool/execution_api/remote/bazel/bazel_ac_client.hpp
@@ -0,0 +1,41 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_AC_CLIENT_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_AC_CLIENT_HPP
+
+#include <functional>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "src/buildtool/common/bazel_types.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_common.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+/// Implements client side for serivce defined here:
+/// https://github.com/bazelbuild/bazel/blob/4b6ad34dbba15dacebfb6cbf76fa741649cdb007/third_party/remoteapis/build/bazel/remote/execution/v2/remote_execution.proto#L137
+class BazelAcClient {
+ public:
+ BazelAcClient(std::string const& server,
+ Port port,
+ std::string const& user = "",
+ std::string const& pwd = "") noexcept;
+
+ [[nodiscard]] auto GetActionResult(
+ std::string const& instance_name,
+ bazel_re::Digest const& action_digest,
+ bool inline_stdout,
+ bool inline_stderr,
+ std::vector<std::string> const& inline_output_files) noexcept
+ -> std::optional<bazel_re::ActionResult>;
+
+ [[nodiscard]] auto UpdateActionResult(std::string const& instance_name,
+ bazel_re::Digest const& digest,
+ bazel_re::ActionResult const& result,
+ int priority) noexcept
+ -> std::optional<bazel_re::ActionResult>;
+
+ private:
+ std::unique_ptr<bazel_re::ActionCache::Stub> stub_;
+ Logger logger_{"BazelAcClient"};
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_AC_CLIENT_HPP
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_action.cpp b/src/buildtool/execution_api/remote/bazel/bazel_action.cpp
new file mode 100644
index 00000000..34fc5380
--- /dev/null
+++ b/src/buildtool/execution_api/remote/bazel/bazel_action.cpp
@@ -0,0 +1,94 @@
+#include "src/buildtool/execution_api/remote/bazel/bazel_action.hpp"
+
+#include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_response.hpp"
+
+BazelAction::BazelAction(
+ std::shared_ptr<BazelNetwork> network,
+ std::shared_ptr<LocalTreeMap> tree_map,
+ bazel_re::Digest root_digest,
+ std::vector<std::string> command,
+ std::vector<std::string> output_files,
+ std::vector<std::string> output_dirs,
+ std::map<std::string, std::string> const& env_vars,
+ std::map<std::string, std::string> const& properties) noexcept
+ : network_{std::move(network)},
+ tree_map_{std::move(tree_map)},
+ root_digest_{std::move(root_digest)},
+ cmdline_{std::move(command)},
+ output_files_{std::move(output_files)},
+ output_dirs_{std::move(output_dirs)},
+ env_vars_{BazelMsgFactory::CreateMessageVectorFromMap<
+ bazel_re::Command_EnvironmentVariable>(env_vars)},
+ properties_{BazelMsgFactory::CreateMessageVectorFromMap<
+ bazel_re::Platform_Property>(properties)} {
+ std::sort(output_files_.begin(), output_files_.end());
+ std::sort(output_dirs_.begin(), output_dirs_.end());
+}
+
+auto BazelAction::Execute(Logger const* logger) noexcept
+ -> IExecutionResponse::Ptr {
+ BlobContainer blobs{};
+ auto do_cache = CacheEnabled(cache_flag_);
+ auto action = CreateBundlesForAction(&blobs, root_digest_, not do_cache);
+
+ if (logger != nullptr) {
+ logger->Emit(LogLevel::Trace,
+ "start execution\n"
+ " - exec_dir digest: {}\n"
+ " - action digest: {}",
+ root_digest_.hash(),
+ action.hash());
+ }
+
+ if (do_cache) {
+ if (auto result =
+ network_->GetCachedActionResult(action, output_files_)) {
+ if (result->exit_code() == 0) {
+ return IExecutionResponse::Ptr{new BazelResponse{
+ action.hash(), network_, tree_map_, {*result, true}}};
+ }
+ }
+ }
+
+ if (ExecutionEnabled(cache_flag_) and network_->UploadBlobs(blobs)) {
+ if (auto output = network_->ExecuteBazelActionSync(action)) {
+ if (cache_flag_ == CacheFlag::PretendCached) {
+ // ensure the same id is created as if caching were enabled
+ auto action_id =
+ CreateBundlesForAction(nullptr, root_digest_, false).hash();
+ output->cached_result = true;
+ return IExecutionResponse::Ptr{
+ new BazelResponse{std::move(action_id),
+ network_,
+ tree_map_,
+ std::move(*output)}};
+ }
+ return IExecutionResponse::Ptr{new BazelResponse{
+ action.hash(), network_, tree_map_, std::move(*output)}};
+ }
+ }
+
+ return nullptr;
+}
+
+auto BazelAction::CreateBundlesForAction(BlobContainer* blobs,
+ bazel_re::Digest const& exec_dir,
+ bool do_not_cache) const noexcept
+ -> bazel_re::Digest {
+ return BazelMsgFactory::CreateActionDigestFromCommandLine(
+ cmdline_,
+ exec_dir,
+ output_files_,
+ output_dirs_,
+ {} /*FIXME output node properties*/,
+ env_vars_,
+ properties_,
+ do_not_cache,
+ timeout_,
+ blobs == nullptr ? std::nullopt
+ : std::make_optional([&blobs](BazelBlob&& blob) {
+ blobs->Emplace(std::move(blob));
+ }));
+}
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_action.hpp b/src/buildtool/execution_api/remote/bazel/bazel_action.hpp
new file mode 100644
index 00000000..7eb9a9e0
--- /dev/null
+++ b/src/buildtool/execution_api/remote/bazel/bazel_action.hpp
@@ -0,0 +1,54 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_ACTION_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_ACTION_HPP
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "src/buildtool/common/bazel_types.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_network.hpp"
+
+class BazelApi;
+
+/// \brief Bazel implementation of the abstract Execution Action.
+/// Uploads all dependencies, creates a Bazel Action and executes it.
+class BazelAction final : public IExecutionAction {
+ friend class BazelApi;
+
+ public:
+ auto Execute(Logger const* logger) noexcept
+ -> IExecutionResponse::Ptr final;
+ void SetCacheFlag(CacheFlag flag) noexcept final { cache_flag_ = flag; }
+ void SetTimeout(std::chrono::milliseconds timeout) noexcept final {
+ timeout_ = timeout;
+ }
+
+ private:
+ std::shared_ptr<BazelNetwork> network_;
+ std::shared_ptr<LocalTreeMap> tree_map_;
+ bazel_re::Digest const root_digest_;
+ std::vector<std::string> const cmdline_;
+ std::vector<std::string> output_files_;
+ std::vector<std::string> output_dirs_;
+ std::vector<bazel_re::Command_EnvironmentVariable> env_vars_;
+ std::vector<bazel_re::Platform_Property> properties_;
+ CacheFlag cache_flag_{CacheFlag::CacheOutput};
+ std::chrono::milliseconds timeout_{kDefaultTimeout};
+
+ BazelAction(std::shared_ptr<BazelNetwork> network,
+ std::shared_ptr<LocalTreeMap> tree_map,
+ bazel_re::Digest root_digest,
+ std::vector<std::string> command,
+ std::vector<std::string> output_files,
+ std::vector<std::string> output_dirs,
+ std::map<std::string, std::string> const& env_vars,
+ std::map<std::string, std::string> const& properties) noexcept;
+
+ [[nodiscard]] auto CreateBundlesForAction(BlobContainer* blobs,
+ bazel_re::Digest const& exec_dir,
+ bool do_not_cache) const noexcept
+ -> bazel_re::Digest;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_ACTION_HPP
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_api.cpp b/src/buildtool/execution_api/remote/bazel/bazel_api.cpp
new file mode 100644
index 00000000..990d6067
--- /dev/null
+++ b/src/buildtool/execution_api/remote/bazel/bazel_api.cpp
@@ -0,0 +1,177 @@
+#include "src/buildtool/execution_api/remote/bazel/bazel_api.hpp"
+
+#include <algorithm>
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "src/buildtool/common/bazel_types.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_blob.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_common.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_ac_client.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_action.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_cas_client.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_network.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_response.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+BazelApi::BazelApi(std::string const& instance_name,
+ std::string const& host,
+ Port port,
+ ExecutionConfiguration const& exec_config) noexcept {
+ tree_map_ = std::make_shared<LocalTreeMap>();
+ network_ = std::make_shared<BazelNetwork>(
+ instance_name, host, port, exec_config, tree_map_);
+}
+
+// implement move constructor in cpp, where all members are complete types
+BazelApi::BazelApi(BazelApi&& other) noexcept = default;
+
+// implement destructor in cpp, where all members are complete types
+BazelApi::~BazelApi() = default;
+
+auto BazelApi::CreateAction(
+ ArtifactDigest const& root_digest,
+ std::vector<std::string> const& command,
+ std::vector<std::string> const& output_files,
+ std::vector<std::string> const& output_dirs,
+ std::map<std::string, std::string> const& env_vars,
+ std::map<std::string, std::string> const& properties) noexcept
+ -> IExecutionAction::Ptr {
+ return std::unique_ptr<BazelAction>{new BazelAction{network_,
+ tree_map_,
+ root_digest,
+ command,
+ output_files,
+ output_dirs,
+ env_vars,
+ properties}};
+}
+
+[[nodiscard]] auto BazelApi::RetrieveToPaths(
+ std::vector<Artifact::ObjectInfo> const& artifacts_info,
+ std::vector<std::filesystem::path> const& output_paths) noexcept -> bool {
+ if (artifacts_info.size() != output_paths.size()) {
+ Logger::Log(LogLevel::Error,
+ "different number of digests and output paths.");
+ return false;
+ }
+
+ // Obtain file digests from artifact infos
+ std::vector<bazel_re::Digest> file_digests{};
+ for (std::size_t i{}; i < artifacts_info.size(); ++i) {
+ auto const& info = artifacts_info[i];
+ if (IsTreeObject(info.type)) {
+ // read object infos from sub tree and call retrieve recursively
+ auto const infos =
+ network_->ReadTreeInfos(info.digest, output_paths[i]);
+ if (not infos or not RetrieveToPaths(infos->second, infos->first)) {
+ return false;
+ }
+ }
+ else {
+ file_digests.emplace_back(info.digest);
+ }
+ }
+
+ // Request file blobs
+ auto size = file_digests.size();
+ auto reader = network_->ReadBlobs(std::move(file_digests));
+ auto blobs = reader.Next();
+ std::size_t count{};
+ while (not blobs.empty()) {
+ if (count + blobs.size() > size) {
+ Logger::Log(LogLevel::Error, "received more blobs than requested.");
+ return false;
+ }
+ for (std::size_t pos = 0; pos < blobs.size(); ++pos) {
+ auto gpos = count + pos;
+ auto const& type = artifacts_info[gpos].type;
+ if (not FileSystemManager::WriteFileAs(
+ blobs[pos].data, output_paths[gpos], type)) {
+ return false;
+ }
+ }
+ count += blobs.size();
+ blobs = reader.Next();
+ }
+
+ if (count != size) {
+ Logger::Log(LogLevel::Error, "could not retrieve all requested blobs.");
+ return false;
+ }
+
+ return true;
+}
+
+[[nodiscard]] auto BazelApi::RetrieveToFds(
+ std::vector<Artifact::ObjectInfo> const& artifacts_info,
+ std::vector<int> const& fds) noexcept -> bool {
+ if (artifacts_info.size() != fds.size()) {
+ Logger::Log(LogLevel::Error,
+ "different number of digests and file descriptors.");
+ return false;
+ }
+
+ for (std::size_t i{}; i < artifacts_info.size(); ++i) {
+ auto fd = fds[i];
+ auto const& info = artifacts_info[i];
+
+ if (gsl::owner<FILE*> out = fdopen(fd, "wb")) { // NOLINT
+ auto const success = network_->DumpToStream(info, out);
+ std::fclose(out);
+ if (not success) {
+ Logger::Log(LogLevel::Error,
+ "dumping {} {} to file descriptor {} failed.",
+ IsTreeObject(info.type) ? "tree" : "blob",
+ info.ToString(),
+ fd);
+ return false;
+ }
+ }
+ else {
+ Logger::Log(
+ LogLevel::Error, "opening file descriptor {} failed.", fd);
+ return false;
+ }
+ }
+ return true;
+}
+
+[[nodiscard]] auto BazelApi::Upload(BlobContainer const& blobs,
+ bool skip_find_missing) noexcept -> bool {
+ return network_->UploadBlobs(blobs, skip_find_missing);
+}
+
+[[nodiscard]] auto BazelApi::UploadTree(
+ std::vector<DependencyGraph::NamedArtifactNodePtr> const&
+ artifacts) noexcept -> std::optional<ArtifactDigest> {
+ BlobContainer blobs{};
+ auto tree = tree_map_->CreateTree();
+ auto digest = BazelMsgFactory::CreateDirectoryDigestFromTree(
+ artifacts,
+ [&blobs](BazelBlob&& blob) { blobs.Emplace(std::move(blob)); },
+ [&tree](auto path, auto info) { return tree.AddInfo(path, info); });
+ if (not digest) {
+ Logger::Log(LogLevel::Debug, "failed to create digest for tree.");
+ return std::nullopt;
+ }
+ if (not Upload(blobs, /*skip_find_missing=*/false)) {
+ Logger::Log(LogLevel::Debug, "failed to upload blobs for tree.");
+ return std::nullopt;
+ }
+ if (tree_map_->AddTree(*digest, std::move(tree))) {
+ return ArtifactDigest{std::move(*digest)};
+ }
+ return std::nullopt;
+}
+
+[[nodiscard]] auto BazelApi::IsAvailable(
+ ArtifactDigest const& digest) const noexcept -> bool {
+ return network_->IsAvailable(digest);
+}
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_api.hpp b/src/buildtool/execution_api/remote/bazel/bazel_api.hpp
new file mode 100644
index 00000000..1405737b
--- /dev/null
+++ b/src/buildtool/execution_api/remote/bazel/bazel_api.hpp
@@ -0,0 +1,65 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_API_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_API_HPP
+
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/common/artifact_digest.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_common.hpp"
+#include "src/buildtool/execution_api/common/execution_api.hpp"
+#include "src/buildtool/execution_api/common/local_tree_map.hpp"
+
+// forward declaration for actual implementations
+class BazelNetwork;
+struct ExecutionConfiguration;
+
+/// \brief Bazel implementation of the abstract Execution API.
+class BazelApi final : public IExecutionApi {
+ public:
+ BazelApi(std::string const& instance_name,
+ std::string const& host,
+ Port port,
+ ExecutionConfiguration const& exec_config) noexcept;
+ BazelApi(BazelApi const&) = delete;
+ BazelApi(BazelApi&& other) noexcept;
+ auto operator=(BazelApi const&) -> BazelApi& = delete;
+ auto operator=(BazelApi &&) -> BazelApi& = delete;
+ ~BazelApi() final;
+
+ auto CreateAction(
+ ArtifactDigest const& root_digest,
+ std::vector<std::string> const& command,
+ std::vector<std::string> const& output_files,
+ std::vector<std::string> const& output_dirs,
+ std::map<std::string, std::string> const& env_vars,
+ std::map<std::string, std::string> const& properties) noexcept
+ -> IExecutionAction::Ptr final;
+
+ [[nodiscard]] auto RetrieveToPaths(
+ std::vector<Artifact::ObjectInfo> const& artifacts_info,
+ std::vector<std::filesystem::path> const& output_paths) noexcept
+ -> bool final;
+
+ [[nodiscard]] auto RetrieveToFds(
+ std::vector<Artifact::ObjectInfo> const& artifacts_info,
+ std::vector<int> const& fds) noexcept -> bool final;
+
+ [[nodiscard]] auto Upload(BlobContainer const& blobs,
+ bool skip_find_missing) noexcept -> bool final;
+
+ [[nodiscard]] auto UploadTree(
+ std::vector<DependencyGraph::NamedArtifactNodePtr> const&
+ artifacts) noexcept -> std::optional<ArtifactDigest> final;
+
+ [[nodiscard]] auto IsAvailable(ArtifactDigest const& digest) const noexcept
+ -> bool final;
+
+ private:
+ std::shared_ptr<BazelNetwork> network_;
+ std::shared_ptr<LocalTreeMap> tree_map_;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_API_HPP
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_cas_client.cpp b/src/buildtool/execution_api/remote/bazel/bazel_cas_client.cpp
new file mode 100644
index 00000000..87ceb4ae
--- /dev/null
+++ b/src/buildtool/execution_api/remote/bazel/bazel_cas_client.cpp
@@ -0,0 +1,354 @@
+#include "src/buildtool/execution_api/remote/bazel/bazel_cas_client.hpp"
+
+#include "grpcpp/grpcpp.h"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/common/bazel_types.hpp"
+#include "src/buildtool/execution_api/common/execution_common.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_client_common.hpp"
+
+namespace {
+
+[[nodiscard]] auto ToResourceName(std::string const& instance_name,
+ bazel_re::Digest const& digest) noexcept
+ -> std::string {
+ return fmt::format(
+ "{}/blobs/{}/{}", instance_name, digest.hash(), digest.size_bytes());
+}
+
+} // namespace
+
+BazelCasClient::BazelCasClient(std::string const& server,
+ Port port,
+ std::string const& user,
+ std::string const& pwd) noexcept
+ : stream_{std::make_unique<ByteStreamClient>(server, port, user, pwd)} {
+ stub_ = bazel_re::ContentAddressableStorage::NewStub(
+ CreateChannelWithCredentials(server, port, user, pwd));
+}
+
+auto BazelCasClient::FindMissingBlobs(
+ std::string const& instance_name,
+ std::vector<bazel_re::Digest> const& digests) noexcept
+ -> std::vector<bazel_re::Digest> {
+ return FindMissingBlobs(instance_name, digests.begin(), digests.end());
+}
+
+auto BazelCasClient::FindMissingBlobs(
+ std::string const& instance_name,
+ BlobContainer::DigestList const& digests) noexcept
+ -> std::vector<bazel_re::Digest> {
+ return FindMissingBlobs(instance_name, digests.begin(), digests.end());
+}
+
+auto BazelCasClient::BatchUpdateBlobs(
+ std::string const& instance_name,
+ std::vector<BazelBlob>::const_iterator const& begin,
+ std::vector<BazelBlob>::const_iterator const& end) noexcept
+ -> std::vector<bazel_re::Digest> {
+ return DoBatchUpdateBlobs(instance_name, begin, end);
+}
+
+auto BazelCasClient::BatchUpdateBlobs(
+ std::string const& instance_name,
+ BlobContainer::iterator const& begin,
+ BlobContainer::iterator const& end) noexcept
+ -> std::vector<bazel_re::Digest> {
+ return DoBatchUpdateBlobs(instance_name, begin, end);
+}
+
+auto BazelCasClient::BatchUpdateBlobs(
+ std::string const& instance_name,
+ BlobContainer::RelatedBlobList::iterator const& begin,
+ BlobContainer::RelatedBlobList::iterator const& end) noexcept
+ -> std::vector<bazel_re::Digest> {
+ return DoBatchUpdateBlobs(instance_name, begin, end);
+}
+
+auto BazelCasClient::BatchReadBlobs(
+ std::string const& instance_name,
+ std::vector<bazel_re::Digest>::const_iterator const& begin,
+ std::vector<bazel_re::Digest>::const_iterator const& end) noexcept
+ -> std::vector<BazelBlob> {
+ auto request =
+ CreateRequest<bazel_re::BatchReadBlobsRequest, bazel_re::Digest>(
+ instance_name, begin, end);
+ grpc::ClientContext context;
+ bazel_re::BatchReadBlobsResponse response;
+ grpc::Status status = stub_->BatchReadBlobs(&context, request, &response);
+
+ std::vector<BazelBlob> result{};
+ if (status.ok()) {
+ result =
+ ProcessBatchResponse<BazelBlob,
+ bazel_re::BatchReadBlobsResponse_Response>(
+ response,
+ [](std::vector<BazelBlob>* v,
+ bazel_re::BatchReadBlobsResponse_Response const& r) {
+ v->emplace_back(r.digest(), r.data());
+ });
+ }
+ else {
+ LogStatus(&logger_, LogLevel::Debug, status);
+ }
+
+ return result;
+}
+
+auto BazelCasClient::GetTree(std::string const& instance_name,
+ bazel_re::Digest const& root_digest,
+ std::int32_t page_size,
+ std::string const& page_token) noexcept
+ -> std::vector<bazel_re::Directory> {
+ auto request =
+ CreateGetTreeRequest(instance_name, root_digest, page_size, page_token);
+
+ grpc::ClientContext context;
+ bazel_re::GetTreeResponse response;
+ auto stream = stub_->GetTree(&context, request);
+
+ std::vector<bazel_re::Directory> result;
+ while (stream->Read(&response)) {
+ result = ProcessResponseContents<bazel_re::Directory>(response);
+ auto const& next_page_token = response.next_page_token();
+ if (!next_page_token.empty()) {
+ // recursively call this function with token for next page
+ auto next_result =
+ GetTree(instance_name, root_digest, page_size, next_page_token);
+ std::move(next_result.begin(),
+ next_result.end(),
+ std::back_inserter(result));
+ }
+ }
+
+ auto status = stream->Finish();
+ if (not status.ok()) {
+ LogStatus(&logger_, LogLevel::Debug, status);
+ }
+
+ return result;
+}
+
+auto BazelCasClient::UpdateSingleBlob(std::string const& instance_name,
+ BazelBlob const& blob) noexcept -> bool {
+ thread_local static std::string uuid{};
+ if (uuid.empty()) {
+ auto id = CreateProcessUniqueId();
+ if (not id) {
+ logger_.Emit(LogLevel::Debug, "Failed creating process unique id.");
+ return false;
+ }
+ uuid = CreateUUIDVersion4(*id);
+ }
+ return stream_->Write(fmt::format("{}/uploads/{}/blobs/{}/{}",
+ instance_name,
+ uuid,
+ blob.digest.hash(),
+ blob.digest.size_bytes()),
+ blob.data);
+}
+
+auto BazelCasClient::IncrementalReadSingleBlob(
+ std::string const& instance_name,
+ bazel_re::Digest const& digest) noexcept
+ -> ByteStreamClient::IncrementalReader {
+ return stream_->IncrementalRead(ToResourceName(instance_name, digest));
+}
+
+auto BazelCasClient::ReadSingleBlob(std::string const& instance_name,
+ bazel_re::Digest const& digest) noexcept
+ -> std::optional<BazelBlob> {
+ if (auto data = stream_->Read(ToResourceName(instance_name, digest))) {
+ return BazelBlob{ArtifactDigest::Create(*data), std::move(*data)};
+ }
+ return std::nullopt;
+}
+
+template <class T_OutputIter>
+auto BazelCasClient::FindMissingBlobs(std::string const& instance_name,
+ T_OutputIter const& start,
+ T_OutputIter const& end) noexcept
+ -> std::vector<bazel_re::Digest> {
+ auto request =
+ CreateRequest<bazel_re::FindMissingBlobsRequest, bazel_re::Digest>(
+ instance_name, start, end);
+
+ grpc::ClientContext context;
+ bazel_re::FindMissingBlobsResponse response;
+ grpc::Status status = stub_->FindMissingBlobs(&context, request, &response);
+
+ std::vector<bazel_re::Digest> result{};
+ if (status.ok()) {
+ result = ProcessResponseContents<bazel_re::Digest>(response);
+ }
+ else {
+ LogStatus(&logger_, LogLevel::Debug, status);
+ }
+
+ return result;
+}
+
+template <class T_OutputIter>
+auto BazelCasClient::DoBatchUpdateBlobs(std::string const& instance_name,
+ T_OutputIter const& start,
+ T_OutputIter const& end) noexcept
+ -> std::vector<bazel_re::Digest> {
+ auto request = CreateUpdateBlobsRequest(instance_name, start, end);
+
+ grpc::ClientContext context;
+ bazel_re::BatchUpdateBlobsResponse response;
+ grpc::Status status = stub_->BatchUpdateBlobs(&context, request, &response);
+
+ std::vector<bazel_re::Digest> result{};
+ if (status.ok()) {
+ result =
+ ProcessBatchResponse<bazel_re::Digest,
+ bazel_re::BatchUpdateBlobsResponse_Response>(
+ response,
+ [](std::vector<bazel_re::Digest>* v,
+ bazel_re::BatchUpdateBlobsResponse_Response const& r) {
+ v->push_back(r.digest());
+ });
+ }
+ else {
+ LogStatus(&logger_, LogLevel::Debug, status);
+ if (status.error_code() == grpc::StatusCode::RESOURCE_EXHAUSTED) {
+ logger_.Emit(LogLevel::Debug,
+ "Falling back to single blob transfers");
+ auto current = start;
+ while (current != end) {
+ if (UpdateSingleBlob(instance_name, (*current))) {
+ result.emplace_back((*current).digest);
+ }
+ ++current;
+ }
+ }
+ }
+
+ return result;
+}
+
+namespace detail {
+
+// Getter for request contents (needs specialization, never implemented)
+template <class T_Content, class T_Request>
+static auto GetRequestContents(T_Request&) noexcept
+ -> pb::RepeatedPtrField<T_Content>*;
+
+// Getter for response contents (needs specialization, never implemented)
+template <class T_Content, class T_Response>
+static auto GetResponseContents(T_Response const&) noexcept
+ -> pb::RepeatedPtrField<T_Content> const&;
+
+// Specialization of GetRequestContents for 'FindMissingBlobsRequest'
+template <>
+auto GetRequestContents<bazel_re::Digest, bazel_re::FindMissingBlobsRequest>(
+ bazel_re::FindMissingBlobsRequest& request) noexcept
+ -> pb::RepeatedPtrField<bazel_re::Digest>* {
+ return request.mutable_blob_digests();
+}
+
+// Specialization of GetRequestContents for 'BatchReadBlobsRequest'
+template <>
+auto GetRequestContents<bazel_re::Digest, bazel_re::BatchReadBlobsRequest>(
+ bazel_re::BatchReadBlobsRequest& request) noexcept
+ -> pb::RepeatedPtrField<bazel_re::Digest>* {
+ return request.mutable_digests();
+}
+
+// Specialization of GetResponseContents for 'FindMissingBlobsResponse'
+template <>
+auto GetResponseContents<bazel_re::Digest, bazel_re::FindMissingBlobsResponse>(
+ bazel_re::FindMissingBlobsResponse const& response) noexcept
+ -> pb::RepeatedPtrField<bazel_re::Digest> const& {
+ return response.missing_blob_digests();
+}
+
+// Specialization of GetResponseContents for 'GetTreeResponse'
+template <>
+auto GetResponseContents<bazel_re::Directory, bazel_re::GetTreeResponse>(
+ bazel_re::GetTreeResponse const& response) noexcept
+ -> pb::RepeatedPtrField<bazel_re::Directory> const& {
+ return response.directories();
+}
+
+} // namespace detail
+
+template <class T_Request, class T_Content, class T_OutputIter>
+auto BazelCasClient::CreateRequest(std::string const& instance_name,
+ T_OutputIter const& start,
+ T_OutputIter const& end) const noexcept
+ -> T_Request {
+ T_Request request;
+ request.set_instance_name(instance_name);
+ std::copy(
+ start,
+ end,
+ pb::back_inserter(detail::GetRequestContents<T_Content>(request)));
+ return request;
+}
+
+template <class T_OutputIter>
+auto BazelCasClient::CreateUpdateBlobsRequest(std::string const& instance_name,
+ T_OutputIter const& start,
+ T_OutputIter const& end)
+ const noexcept -> bazel_re::BatchUpdateBlobsRequest {
+ bazel_re::BatchUpdateBlobsRequest request;
+ request.set_instance_name(instance_name);
+ std::transform(start,
+ end,
+ pb::back_inserter(request.mutable_requests()),
+ [](BazelBlob const& b) {
+ return BazelCasClient::CreateUpdateBlobsSingleRequest(b);
+ });
+ return request;
+}
+
+auto BazelCasClient::CreateUpdateBlobsSingleRequest(BazelBlob const& b) noexcept
+ -> bazel_re::BatchUpdateBlobsRequest_Request {
+ bazel_re::BatchUpdateBlobsRequest_Request r{};
+ r.set_allocated_digest(
+ gsl::owner<bazel_re::Digest*>{new bazel_re::Digest{b.digest}});
+ r.set_data(b.data);
+ return r;
+}
+
+auto BazelCasClient::CreateGetTreeRequest(
+ std::string const& instance_name,
+ bazel_re::Digest const& root_digest,
+ int page_size,
+ std::string const& page_token) noexcept -> bazel_re::GetTreeRequest {
+ bazel_re::GetTreeRequest request;
+ request.set_instance_name(instance_name);
+ request.set_allocated_root_digest(
+ gsl::owner<bazel_re::Digest*>{new bazel_re::Digest{root_digest}});
+ request.set_page_size(page_size);
+ request.set_page_token(page_token);
+ return request;
+}
+
+template <class T_Content, class T_Inner, class T_Response>
+auto BazelCasClient::ProcessBatchResponse(
+ T_Response const& response,
+ std::function<void(std::vector<T_Content>*, T_Inner const&)> const&
+ inserter) const noexcept -> std::vector<T_Content> {
+ std::vector<T_Content> output;
+ for (auto const& res : response.responses()) {
+ auto const& res_status = res.status();
+ if (res_status.code() == static_cast<int>(grpc::StatusCode::OK)) {
+ inserter(&output, res);
+ }
+ else {
+ LogStatus(&logger_, LogLevel::Debug, res_status);
+ }
+ }
+ return output;
+}
+
+template <class T_Content, class T_Response>
+auto BazelCasClient::ProcessResponseContents(
+ T_Response const& response) const noexcept -> std::vector<T_Content> {
+ std::vector<T_Content> output;
+ auto const& contents = detail::GetResponseContents<T_Content>(response);
+ std::copy(contents.begin(), contents.end(), std::back_inserter(output));
+ return output;
+}
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_cas_client.hpp b/src/buildtool/execution_api/remote/bazel/bazel_cas_client.hpp
new file mode 100644
index 00000000..f3d6daff
--- /dev/null
+++ b/src/buildtool/execution_api/remote/bazel/bazel_cas_client.hpp
@@ -0,0 +1,169 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_CAS_CLIENT_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_CAS_CLIENT_HPP
+
+#include <functional>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "build/bazel/remote/execution/v2/remote_execution.grpc.pb.h"
+#include "src/buildtool/common/bazel_types.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_common.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bytestream_client.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+/// Implements client side for serivce defined here:
+/// https://github.com/bazelbuild/bazel/blob/4b6ad34dbba15dacebfb6cbf76fa741649cdb007/third_party/remoteapis/build/bazel/remote/execution/v2/remote_execution.proto#L243
+class BazelCasClient {
+ public:
+ BazelCasClient(std::string const& server,
+ Port port,
+ std::string const& user = "",
+ std::string const& pwd = "") noexcept;
+
+ /// \brief Find missing blobs
+ /// \param[in] instance_name Name of the CAS instance
+ /// \param[in] digests The blob digests to search for
+ /// \returns The digests of blobs not found in CAS
+ [[nodiscard]] auto FindMissingBlobs(
+ std::string const& instance_name,
+ std::vector<bazel_re::Digest> const& digests) noexcept
+ -> std::vector<bazel_re::Digest>;
+
+ /// \brief Find missing blobs
+ /// \param[in] instance_name Name of the CAS instance
+ /// \param[in] digests The blob digests to search for
+ /// \returns The digests of blobs not found in CAS
+ [[nodiscard]] auto FindMissingBlobs(
+ std::string const& instance_name,
+ BlobContainer::DigestList const& digests) noexcept
+ -> std::vector<bazel_re::Digest>;
+
+ /// \brief Upload multiple blobs in batch transfer
+ /// \param[in] instance_name Name of the CAS instance
+ /// \param[in] begin Start of the blobs to upload
+ /// \param[in] end End of the blobs to upload
+ /// \returns The digests of blobs sucessfully updated
+ [[nodiscard]] auto BatchUpdateBlobs(
+ std::string const& instance_name,
+ std::vector<BazelBlob>::const_iterator const& begin,
+ std::vector<BazelBlob>::const_iterator const& end) noexcept
+ -> std::vector<bazel_re::Digest>;
+
+ /// \brief Upload multiple blobs in batch transfer
+ /// \param[in] instance_name Name of the CAS instance
+ /// \param[in] begin Start of the blobs to upload
+ /// \param[in] end End of the blobs to upload
+ /// \returns The digests of blobs sucessfully updated
+ [[nodiscard]] auto BatchUpdateBlobs(
+ std::string const& instance_name,
+ BlobContainer::iterator const& begin,
+ BlobContainer::iterator const& end) noexcept
+ -> std::vector<bazel_re::Digest>;
+
+ /// \brief Upload multiple blobs in batch transfer
+ /// \param[in] instance_name Name of the CAS instance
+ /// \param[in] begin Start of the blobs to upload
+ /// \param[in] end End of the blobs to upload
+ /// \returns The digests of blobs sucessfully updated
+ [[nodiscard]] auto BatchUpdateBlobs(
+ std::string const& instance_name,
+ BlobContainer::RelatedBlobList::iterator const& begin,
+ BlobContainer::RelatedBlobList::iterator const& end) noexcept
+ -> std::vector<bazel_re::Digest>;
+
+ /// \brief Read multiple blobs in batch transfer
+ /// \param[in] instance_name Name of the CAS instance
+ /// \param[in] begin Start of the blob digests to read
+ /// \param[in] end End of the blob digests to read
+ /// \returns The blobs sucessfully read
+ [[nodiscard]] auto BatchReadBlobs(
+ std::string const& instance_name,
+ std::vector<bazel_re::Digest>::const_iterator const& begin,
+ std::vector<bazel_re::Digest>::const_iterator const& end) noexcept
+ -> std::vector<BazelBlob>;
+
+ [[nodiscard]] auto GetTree(std::string const& instance_name,
+ bazel_re::Digest const& root_digest,
+ std::int32_t page_size,
+ std::string const& page_token = "") noexcept
+ -> std::vector<bazel_re::Directory>;
+
+ /// \brief Upload single blob via bytestream
+ /// \param[in] instance_name Name of the CAS instance
+ /// \param[in] blob The blob to upload
+ /// \returns Boolean indicating successful upload
+ [[nodiscard]] auto UpdateSingleBlob(std::string const& instance_name,
+ BazelBlob const& blob) noexcept -> bool;
+
+ /// \brief Read single blob via incremental bytestream reader
+ /// \param[in] instance_name Name of the CAS instance
+ /// \param[in] digest Blob digest to read
+ /// \returns Incremental bytestream reader.
+ [[nodiscard]] auto IncrementalReadSingleBlob(
+ std::string const& instance_name,
+ bazel_re::Digest const& digest) noexcept
+ -> ByteStreamClient::IncrementalReader;
+
+ /// \brief Read single blob via bytestream
+ /// \param[in] instance_name Name of the CAS instance
+ /// \param[in] digest Blob digest to read
+ /// \returns The blob successfully read
+ [[nodiscard]] auto ReadSingleBlob(std::string const& instance_name,
+ bazel_re::Digest const& digest) noexcept
+ -> std::optional<BazelBlob>;
+
+ private:
+ std::unique_ptr<ByteStreamClient> stream_{};
+ std::unique_ptr<bazel_re::ContentAddressableStorage::Stub> stub_;
+ Logger logger_{"BazelCasClient"};
+
+ template <class T_OutputIter>
+ [[nodiscard]] auto FindMissingBlobs(std::string const& instance_name,
+ T_OutputIter const& start,
+ T_OutputIter const& end) noexcept
+ -> std::vector<bazel_re::Digest>;
+
+ template <class T_OutputIter>
+ [[nodiscard]] auto DoBatchUpdateBlobs(std::string const& instance_name,
+ T_OutputIter const& start,
+ T_OutputIter const& end) noexcept
+ -> std::vector<bazel_re::Digest>;
+
+ template <class T_Request, class T_Content, class T_OutputIter>
+ [[nodiscard]] auto CreateRequest(std::string const& instance_name,
+ T_OutputIter const& start,
+ T_OutputIter const& end) const noexcept
+ -> T_Request;
+
+ template <class T_OutputIter>
+ [[nodiscard]] auto CreateUpdateBlobsRequest(
+ std::string const& instance_name,
+ T_OutputIter const& start,
+ T_OutputIter const& end) const noexcept
+ -> bazel_re::BatchUpdateBlobsRequest;
+
+ [[nodiscard]] static auto CreateUpdateBlobsSingleRequest(
+ BazelBlob const& b) noexcept
+ -> bazel_re::BatchUpdateBlobsRequest_Request;
+
+ [[nodiscard]] static auto CreateGetTreeRequest(
+ std::string const& instance_name,
+ bazel_re::Digest const& root_digest,
+ int page_size,
+ std::string const& page_token) noexcept -> bazel_re::GetTreeRequest;
+
+ template <class T_Content, class T_Inner, class T_Response>
+ auto ProcessBatchResponse(
+ T_Response const& response,
+ std::function<void(std::vector<T_Content>*, T_Inner const&)> const&
+ inserter) const noexcept -> std::vector<T_Content>;
+
+ template <class T_Content, class T_Response>
+ auto ProcessResponseContents(T_Response const& response) const noexcept
+ -> std::vector<T_Content>;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_CAS_CLIENT_HPP
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_client_common.hpp b/src/buildtool/execution_api/remote/bazel/bazel_client_common.hpp
new file mode 100644
index 00000000..6e37fa28
--- /dev/null
+++ b/src/buildtool/execution_api/remote/bazel/bazel_client_common.hpp
@@ -0,0 +1,54 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_CLIENT_COMMON_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_CLIENT_COMMON_HPP
+
+/// \file bazel_client_common.hpp
+/// \brief Common types and functions required by client implementations.
+
+#include <sstream>
+#include <string>
+
+#include "grpcpp/grpcpp.h"
+#include "src/buildtool/common/bazel_types.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_common.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+[[maybe_unused]] [[nodiscard]] static inline auto CreateChannelWithCredentials(
+ std::string const& server,
+ Port port,
+ std::string const& user = "",
+ [[maybe_unused]] std::string const& pwd = "") noexcept {
+ std::shared_ptr<grpc::ChannelCredentials> cred;
+ std::string address = server + ':' + std::to_string(port);
+ if (user.empty()) {
+ cred = grpc::InsecureChannelCredentials();
+ }
+ else {
+ // TODO(oreiche): set up authentication credentials
+ }
+ return grpc::CreateChannel(address, cred);
+}
+
+[[maybe_unused]] static inline void LogStatus(Logger const* logger,
+ LogLevel level,
+ grpc::Status const& s) noexcept {
+ if (logger == nullptr) {
+ Logger::Log(level, "{}: {}", s.error_code(), s.error_message());
+ }
+ else {
+ logger->Emit(level, "{}: {}", s.error_code(), s.error_message());
+ }
+}
+
+[[maybe_unused]] static inline void LogStatus(
+ Logger const* logger,
+ LogLevel level,
+ google::rpc::Status const& s) noexcept {
+ if (logger == nullptr) {
+ Logger::Log(level, "{}: {}", s.code(), s.message());
+ }
+ else {
+ logger->Emit(level, "{}: {}", s.code(), s.message());
+ }
+}
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_CLIENT_COMMON_HPP
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_execution_client.cpp b/src/buildtool/execution_api/remote/bazel/bazel_execution_client.cpp
new file mode 100644
index 00000000..3b95a8ff
--- /dev/null
+++ b/src/buildtool/execution_api/remote/bazel/bazel_execution_client.cpp
@@ -0,0 +1,129 @@
+#include "src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp"
+
+#include "grpcpp/grpcpp.h"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_client_common.hpp"
+
+namespace bazel_re = build::bazel::remote::execution::v2;
+
+BazelExecutionClient::BazelExecutionClient(std::string const& server,
+ Port port,
+ std::string const& user,
+ std::string const& pwd) noexcept {
+ stub_ = bazel_re::Execution::NewStub(
+ CreateChannelWithCredentials(server, port, user, pwd));
+}
+
+auto BazelExecutionClient::Execute(std::string const& instance_name,
+ bazel_re::Digest const& action_digest,
+ ExecutionConfiguration const& config,
+ bool wait)
+ -> BazelExecutionClient::ExecutionResponse {
+ auto execution_policy = std::make_unique<bazel_re::ExecutionPolicy>();
+ execution_policy->set_priority(config.execution_priority);
+
+ auto results_cache_policy =
+ std::make_unique<bazel_re::ResultsCachePolicy>();
+ results_cache_policy->set_priority(config.results_cache_priority);
+
+ bazel_re::ExecuteRequest request;
+ request.set_instance_name(instance_name);
+ request.set_skip_cache_lookup(config.skip_cache_lookup);
+ request.set_allocated_action_digest(
+ gsl::owner<bazel_re::Digest*>{new bazel_re::Digest(action_digest)});
+ request.set_allocated_execution_policy(execution_policy.release());
+ request.set_allocated_results_cache_policy(results_cache_policy.release());
+
+ grpc::ClientContext context;
+ std::unique_ptr<grpc::ClientReader<google::longrunning::Operation>> reader(
+ stub_->Execute(&context, request));
+
+ return ExtractContents(ReadExecution(reader.get(), wait));
+}
+
+auto BazelExecutionClient::WaitExecution(std::string const& execution_handle)
+ -> BazelExecutionClient::ExecutionResponse {
+ bazel_re::WaitExecutionRequest request;
+ request.set_name(execution_handle);
+
+ grpc::ClientContext context;
+ std::unique_ptr<grpc::ClientReader<google::longrunning::Operation>> reader(
+ stub_->WaitExecution(&context, request));
+
+ return ExtractContents(ReadExecution(reader.get(), true));
+}
+
+auto BazelExecutionClient::ReadExecution(
+ grpc::ClientReader<google::longrunning::Operation>* reader,
+ bool wait) -> std::optional<google::longrunning::Operation> {
+ if (reader == nullptr) {
+ // TODO(vmoreno): log error
+ return std::nullopt;
+ }
+
+ google::longrunning::Operation operation;
+ if (not reader->Read(&operation)) {
+ grpc::Status status = reader->Finish();
+ // TODO(vmoreno): log error using data in status and operation
+ LogStatus(&logger_, LogLevel::Debug, status);
+ return std::nullopt;
+ }
+ // Important note: do not call reader->Finish() unless reader->Read()
+ // returned false, otherwise the thread will be never released
+ if (wait) {
+ while (reader->Read(&operation)) {
+ }
+ grpc::Status status = reader->Finish();
+ if (not status.ok()) {
+ // TODO(vmoreno): log error from status and operation
+ LogStatus(&logger_, LogLevel::Debug, status);
+ return std::nullopt;
+ }
+ }
+ return operation;
+}
+
+auto BazelExecutionClient::ExtractContents(
+ std::optional<google::longrunning::Operation>&& operation)
+ -> BazelExecutionClient::ExecutionResponse {
+ if (not operation) {
+ // Error was already logged in ReadExecution()
+ return ExecutionResponse::MakeEmptyFailed();
+ }
+ auto op = *operation;
+ ExecutionResponse response;
+ response.execution_handle = op.name();
+ if (not op.done()) {
+ response.state = ExecutionResponse::State::Ongoing;
+ return response;
+ }
+ if (op.has_error()) {
+ // TODO(vmoreno): log error from google::rpc::Status s = op.error()
+ LogStatus(&logger_, LogLevel::Debug, op.error());
+ response.state = ExecutionResponse::State::Failed;
+ return response;
+ }
+
+ // Get execution response Unpacked from Protobufs Any type to the actual
+ // type in our case
+ bazel_re::ExecuteResponse exec_response;
+ auto const& raw_response = op.response();
+ if (not raw_response.Is<bazel_re::ExecuteResponse>()) {
+ // Fatal error, the type should be correct
+ response.state = ExecutionResponse::State::Failed;
+ return response;
+ }
+
+ response.state = ExecutionResponse::State::Finished;
+
+ raw_response.UnpackTo(&exec_response);
+
+ ExecutionOutput output;
+ output.action_result = exec_response.result();
+ output.cached_result = exec_response.cached_result();
+ output.message = exec_response.message();
+ output.server_logs = exec_response.server_logs();
+ response.output = output;
+
+ return response;
+}
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp b/src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp
new file mode 100644
index 00000000..b213a94d
--- /dev/null
+++ b/src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp
@@ -0,0 +1,66 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_EXECUTION_CLIENT_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_EXECUTION_CLIENT_HPP
+
+#include <memory>
+#include <optional>
+#include <string>
+
+#include "build/bazel/remote/execution/v2/remote_execution.grpc.pb.h"
+#include "google/longrunning/operations.pb.h"
+#include "src/buildtool/common/bazel_types.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_common.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+/// Implements client side for serivce defined here:
+/// https://github.com/bazelbuild/bazel/blob/4b6ad34dbba15dacebfb6cbf76fa741649cdb007/third_party/remoteapis/build/bazel/remote/execution/v2/remote_execution.proto#L42
+class BazelExecutionClient {
+ public:
+ struct ExecutionOutput {
+ bazel_re::ActionResult action_result{};
+ bool cached_result{};
+ grpc::Status status{};
+ // TODO(vmoreno): switch to non-google type for the map
+ google::protobuf::Map<std::string, bazel_re::LogFile> server_logs{};
+ std::string message{};
+ };
+
+ struct ExecutionResponse {
+ enum class State { Failed, Ongoing, Finished, Unknown };
+
+ std::string execution_handle{};
+ State state{State::Unknown};
+ std::optional<ExecutionOutput> output{std::nullopt};
+
+ static auto MakeEmptyFailed() -> ExecutionResponse {
+ return ExecutionResponse{
+ {}, ExecutionResponse::State::Failed, std::nullopt};
+ }
+ };
+
+ BazelExecutionClient(std::string const& server,
+ Port port,
+ std::string const& user = "",
+ std::string const& pwd = "") noexcept;
+
+ [[nodiscard]] auto Execute(std::string const& instance_name,
+ bazel_re::Digest const& action_digest,
+ ExecutionConfiguration const& config,
+ bool wait) -> ExecutionResponse;
+
+ [[nodiscard]] auto WaitExecution(std::string const& execution_handle)
+ -> ExecutionResponse;
+
+ private:
+ std::unique_ptr<bazel_re::Execution::Stub> stub_;
+ Logger logger_{"BazelExecutionClient"};
+
+ [[nodiscard]] auto ReadExecution(
+ grpc::ClientReader<google::longrunning::Operation>* reader,
+ bool wait) -> std::optional<google::longrunning::Operation>;
+
+ [[nodiscard]] auto ExtractContents(
+ std::optional<google::longrunning::Operation>&& operation)
+ -> ExecutionResponse;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_EXECUTION_CLIENT_HPP
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_network.cpp b/src/buildtool/execution_api/remote/bazel/bazel_network.cpp
new file mode 100644
index 00000000..ad160d0c
--- /dev/null
+++ b/src/buildtool/execution_api/remote/bazel/bazel_network.cpp
@@ -0,0 +1,327 @@
+#include "src/buildtool/execution_api/remote/bazel/bazel_network.hpp"
+
+#include "src/buildtool/execution_api/remote/bazel/bazel_client_common.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_response.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+namespace {
+
+[[nodiscard]] auto ReadDirectory(
+ gsl::not_null<BazelNetwork const*> const& network,
+ bazel_re::Digest const& digest) noexcept
+ -> std::optional<bazel_re::Directory> {
+ auto blobs = network->ReadBlobs({digest}).Next();
+ if (blobs.size() == 1) {
+ return BazelMsgFactory::MessageFromString<bazel_re::Directory>(
+ blobs.at(0).data);
+ }
+ Logger::Log(
+ LogLevel::Error, "Directory {} not found in CAS", digest.hash());
+ return std::nullopt;
+}
+
+[[nodiscard]] auto TreeToStream(
+ gsl::not_null<BazelNetwork const*> const& network,
+ bazel_re::Digest const& tree_digest,
+ gsl::not_null<FILE*> const& stream) noexcept -> bool {
+ if (auto dir = ReadDirectory(network, tree_digest)) {
+ if (auto data = BazelMsgFactory::DirectoryToString(*dir)) {
+ std::fwrite(data->data(), 1, data->size(), stream);
+ return true;
+ }
+ }
+ return false;
+}
+
+[[nodiscard]] auto BlobToStream(
+ gsl::not_null<BazelNetwork const*> const& network,
+ bazel_re::Digest const& blob_digest,
+ gsl::not_null<FILE*> const& stream) noexcept -> bool {
+ auto reader = network->IncrementalReadSingleBlob(blob_digest);
+ auto data = reader.Next();
+ while (data and not data->empty()) {
+ std::fwrite(data->data(), 1, data->size(), stream);
+ data = reader.Next();
+ }
+ return data.has_value();
+}
+
+} // namespace
+
+BazelNetwork::BazelNetwork(std::string instance_name,
+ std::string const& host,
+ Port port,
+ ExecutionConfiguration const& exec_config,
+ std::shared_ptr<LocalTreeMap> tree_map) noexcept
+ : instance_name_{std::move(instance_name)},
+ exec_config_{exec_config},
+ cas_{std::make_unique<BazelCasClient>(host, port)},
+ ac_{std::make_unique<BazelAcClient>(host, port)},
+ exec_{std::make_unique<BazelExecutionClient>(host, port)},
+ tree_map_{std::move(tree_map)} {}
+
+auto BazelNetwork::IsAvailable(bazel_re::Digest const& digest) const noexcept
+ -> bool {
+ return cas_
+ ->FindMissingBlobs(instance_name_,
+ std::vector<bazel_re::Digest>{digest})
+ .empty();
+}
+
+template <class T_Iter>
+auto BazelNetwork::DoUploadBlobs(T_Iter const& first,
+ T_Iter const& last) noexcept -> bool {
+ auto num_blobs = gsl::narrow<std::size_t>(std::distance(first, last));
+
+ std::vector<bazel_re::Digest> digests{};
+ digests.reserve(num_blobs);
+
+ auto begin = first;
+ auto current = first;
+ std::size_t transfer_size{};
+ while (current != last) {
+ auto const& blob = *current;
+ transfer_size += blob.data.size();
+ if (transfer_size > kMaxBatchTransferSize) {
+ if (begin == current) {
+ if (cas_->UpdateSingleBlob(instance_name_, blob)) {
+ digests.emplace_back(blob.digest);
+ }
+ ++current;
+ }
+ else {
+ for (auto& digest :
+ cas_->BatchUpdateBlobs(instance_name_, begin, current)) {
+ digests.emplace_back(std::move(digest));
+ }
+ }
+ begin = current;
+ transfer_size = 0;
+ }
+ else {
+ ++current;
+ }
+ }
+ if (begin != current) {
+ for (auto& digest :
+ cas_->BatchUpdateBlobs(instance_name_, begin, current)) {
+ digests.emplace_back(std::move(digest));
+ }
+ }
+
+ if (digests.size() != num_blobs) {
+ Logger::Log(LogLevel::Warning, "Failed to update all blobs");
+ return false;
+ }
+
+ return true;
+}
+
+auto BazelNetwork::UploadBlobs(BlobContainer const& blobs,
+ bool skip_find_missing) noexcept -> bool {
+ if (skip_find_missing) {
+ return DoUploadBlobs(blobs.begin(), blobs.end());
+ }
+
+ // find digests of blobs missing in CAS
+ auto missing_digests =
+ cas_->FindMissingBlobs(instance_name_, blobs.Digests());
+
+ if (not missing_digests.empty()) {
+ // update missing blobs
+ auto missing_blobs = blobs.RelatedBlobs(missing_digests);
+ return DoUploadBlobs(missing_blobs.begin(), missing_blobs.end());
+ }
+ return true;
+}
+
+auto BazelNetwork::ExecuteBazelActionSync(
+ bazel_re::Digest const& action) noexcept
+ -> std::optional<BazelExecutionClient::ExecutionOutput> {
+ auto response =
+ exec_->Execute(instance_name_, action, exec_config_, true /*wait*/);
+
+ if (response.state !=
+ BazelExecutionClient::ExecutionResponse::State::Finished or
+ not response.output) {
+ // TODO(oreiche): logging
+ return std::nullopt;
+ }
+
+ return response.output;
+}
+
+auto BazelNetwork::BlobReader::Next() noexcept -> std::vector<BazelBlob> {
+ std::size_t size{};
+ std::vector<BazelBlob> blobs{};
+
+ while (current_ != ids_.end()) {
+ size += gsl::narrow<std::size_t>(current_->size_bytes());
+ if (size > kMaxBatchTransferSize) {
+ if (begin_ == current_) {
+ auto blob = cas_->ReadSingleBlob(instance_name_, *begin_);
+ if (blob) {
+ blobs.emplace_back(std::move(*blob));
+ }
+ ++current_;
+ }
+ else {
+ blobs = cas_->BatchReadBlobs(instance_name_, begin_, current_);
+ }
+ begin_ = current_;
+ break;
+ }
+ ++current_;
+ }
+
+ if (begin_ != current_) {
+ blobs = cas_->BatchReadBlobs(instance_name_, begin_, current_);
+ begin_ = current_;
+ }
+
+ return blobs;
+}
+
+auto BazelNetwork::ReadBlobs(std::vector<bazel_re::Digest> ids) const noexcept
+ -> BlobReader {
+ return BlobReader{instance_name_, cas_.get(), std::move(ids)};
+}
+
+auto BazelNetwork::IncrementalReadSingleBlob(bazel_re::Digest const& id)
+ const noexcept -> ByteStreamClient::IncrementalReader {
+ return cas_->IncrementalReadSingleBlob(instance_name_, id);
+}
+
+auto BazelNetwork::GetCachedActionResult(
+ bazel_re::Digest const& action,
+ std::vector<std::string> const& output_files) const noexcept
+ -> std::optional<bazel_re::ActionResult> {
+ return ac_->GetActionResult(
+ instance_name_, action, false, false, output_files);
+}
+
+auto BazelNetwork::ReadTreeInfos(bazel_re::Digest const& tree_digest,
+ std::filesystem::path const& parent,
+ bool request_remote_tree) const noexcept
+ -> std::optional<std::pair<std::vector<std::filesystem::path>,
+ std::vector<Artifact::ObjectInfo>>> {
+ std::optional<DirectoryMap> dir_map{std::nullopt};
+ if (request_remote_tree or not tree_map_) {
+ // Query full tree from remote CAS. Note that this is currently not
+ // supported by Buildbarn revision c3c06bbe2a.
+ auto dirs =
+ cas_->GetTree(instance_name_, tree_digest, kMaxBatchTransferSize);
+
+ // Convert to Directory map
+ dir_map = DirectoryMap{};
+ dir_map->reserve(dirs.size());
+ for (auto& dir : dirs) {
+ try {
+ dir_map->emplace(
+ ArtifactDigest::Create(dir.SerializeAsString()),
+ std::move(dir));
+ } catch (...) {
+ return std::nullopt;
+ }
+ }
+ }
+
+ std::vector<std::filesystem::path> paths{};
+ std::vector<Artifact::ObjectInfo> infos{};
+
+ auto store_info = [&paths, &infos](auto path, auto info) {
+ paths.emplace_back(path);
+ infos.emplace_back(info);
+ return true;
+ };
+
+ if (ReadObjectInfosRecursively(dir_map, store_info, parent, tree_digest)) {
+ return std::make_pair(std::move(paths), std::move(infos));
+ }
+
+ return std::nullopt;
+}
+
+auto BazelNetwork::ReadObjectInfosRecursively(
+ std::optional<DirectoryMap> const& dir_map,
+ BazelMsgFactory::InfoStoreFunc const& store_info,
+ std::filesystem::path const& parent,
+ bazel_re::Digest const& digest) const noexcept -> bool {
+ // read from in-memory tree map
+ if (tree_map_) {
+ auto const* tree = tree_map_->GetTree(digest);
+ if (tree != nullptr) {
+ for (auto const& [path, info] : *tree) {
+ try {
+ if (IsTreeObject(info->type)
+ ? not ReadObjectInfosRecursively(dir_map,
+ store_info,
+ parent / path,
+ info->digest)
+ : not store_info(parent / path, *info)) {
+ return false;
+ }
+ } catch (...) { // satisfy clang-tidy, store_info() could throw
+ return false;
+ }
+ }
+ return true;
+ }
+ Logger::Log(
+ LogLevel::Debug, "tree {} not found in tree map", digest.hash());
+ }
+
+ // read from in-memory Directory map and cache it in in-memory tree map
+ if (dir_map) {
+ if (dir_map->contains(digest)) {
+ auto tree = tree_map_ ? std::make_optional(tree_map_->CreateTree())
+ : std::nullopt;
+ return BazelMsgFactory::ReadObjectInfosFromDirectory(
+ dir_map->at(digest),
+ [this, &dir_map, &store_info, &parent, &tree](
+ auto path, auto info) {
+ return IsTreeObject(info.type)
+ ? (not tree or
+ tree->AddInfo(path, info)) and
+ ReadObjectInfosRecursively(
+ dir_map,
+ store_info,
+ parent / path,
+ info.digest)
+ : store_info(parent / path, info);
+ }) and
+ (not tree_map_ or
+ tree_map_->AddTree(digest, std::move(*tree)));
+ }
+ Logger::Log(
+ LogLevel::Debug, "tree {} not found in dir map", digest.hash());
+ }
+
+ // fallback read from CAS and cache it in in-memory tree map
+ if (auto dir = ReadDirectory(this, digest)) {
+ auto tree = tree_map_ ? std::make_optional(tree_map_->CreateTree())
+ : std::nullopt;
+ return BazelMsgFactory::ReadObjectInfosFromDirectory(
+ *dir,
+ [this, &dir_map, &store_info, &parent, &tree](auto path,
+ auto info) {
+ return IsTreeObject(info.type)
+ ? (not tree or tree->AddInfo(path, info)) and
+ ReadObjectInfosRecursively(
+ dir_map,
+ store_info,
+ parent / path,
+ info.digest)
+ : store_info(parent / path, info);
+ }) and
+ (not tree_map_ or tree_map_->AddTree(digest, std::move(*tree)));
+ }
+ return false;
+}
+
+auto BazelNetwork::DumpToStream(
+ Artifact::ObjectInfo const& info,
+ gsl::not_null<FILE*> const& stream) const noexcept -> bool {
+ return IsTreeObject(info.type) ? TreeToStream(this, info.digest, stream)
+ : BlobToStream(this, info.digest, stream);
+}
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_network.hpp b/src/buildtool/execution_api/remote/bazel/bazel_network.hpp
new file mode 100644
index 00000000..644af2b4
--- /dev/null
+++ b/src/buildtool/execution_api/remote/bazel/bazel_network.hpp
@@ -0,0 +1,118 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_NETWORK_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_NETWORK_HPP
+
+#include <memory>
+#include <optional>
+#include <unordered_map>
+
+#include "src/buildtool/common/bazel_types.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_blob.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp"
+#include "src/buildtool/execution_api/common/execution_api.hpp"
+#include "src/buildtool/execution_api/common/local_tree_map.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_ac_client.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_cas_client.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp"
+
+/// \brief Contains all network clients and is responsible for all network IO.
+class BazelNetwork {
+ public:
+ class BlobReader {
+ friend class BazelNetwork;
+
+ public:
+ // Obtain the next batch of blobs that can be transferred in a single
+ // request.
+ [[nodiscard]] auto Next() noexcept -> std::vector<BazelBlob>;
+
+ private:
+ std::string instance_name_;
+ gsl::not_null<BazelCasClient*> cas_;
+ std::vector<bazel_re::Digest> const ids_;
+ std::vector<bazel_re::Digest>::const_iterator begin_;
+ std::vector<bazel_re::Digest>::const_iterator current_;
+
+ BlobReader(std::string instance_name,
+ gsl::not_null<BazelCasClient*> cas,
+ std::vector<bazel_re::Digest> ids)
+ : instance_name_{std::move(instance_name)},
+ cas_{std::move(cas)},
+ ids_{std::move(ids)},
+ begin_{ids_.begin()},
+ current_{begin_} {};
+ };
+
+ BazelNetwork(std::string instance_name,
+ std::string const& host,
+ Port port,
+ ExecutionConfiguration const& exec_config,
+ std::shared_ptr<LocalTreeMap> tree_map = nullptr) noexcept;
+
+ /// \brief Check if digest exists in CAS
+ /// \param[in] digest The digest to look up
+ /// \returns True if digest exists in CAS, false otherwise
+ [[nodiscard]] auto IsAvailable(
+ bazel_re::Digest const& digest) const noexcept -> bool;
+
+ /// \brief Uploads blobs to CAS
+ /// \param blobs The blobs to upload
+ /// \param skip_find_missing Skip finding missing blobs, just upload all
+ /// \returns True if upload was successful, false otherwise
+ [[nodiscard]] auto UploadBlobs(BlobContainer const& blobs,
+ bool skip_find_missing = false) noexcept
+ -> bool;
+
+ [[nodiscard]] auto ExecuteBazelActionSync(
+ bazel_re::Digest const& action) noexcept
+ -> std::optional<BazelExecutionClient::ExecutionOutput>;
+
+ [[nodiscard]] auto ReadBlobs(
+ std::vector<bazel_re::Digest> ids) const noexcept -> BlobReader;
+
+ [[nodiscard]] auto IncrementalReadSingleBlob(bazel_re::Digest const& id)
+ const noexcept -> ByteStreamClient::IncrementalReader;
+
+ [[nodiscard]] auto GetCachedActionResult(
+ bazel_re::Digest const& action,
+ std::vector<std::string> const& output_files) const noexcept
+ -> std::optional<bazel_re::ActionResult>;
+
+ [[nodiscard]] auto ReadTreeInfos(
+ bazel_re::Digest const& tree_digest,
+ std::filesystem::path const& parent,
+ bool request_remote_tree = false) const noexcept
+ -> std::optional<std::pair<std::vector<std::filesystem::path>,
+ std::vector<Artifact::ObjectInfo>>>;
+
+ [[nodiscard]] auto DumpToStream(
+ Artifact::ObjectInfo const& info,
+ gsl::not_null<FILE*> const& stream) const noexcept -> bool;
+
+ private:
+ using DirectoryMap =
+ std::unordered_map<bazel_re::Digest, bazel_re::Directory>;
+
+ // Max size for batch transfers
+ static constexpr std::size_t kMaxBatchTransferSize = 3 * 1024 * 1024;
+ static_assert(kMaxBatchTransferSize < GRPC_DEFAULT_MAX_RECV_MESSAGE_LENGTH,
+ "Max batch transfer size too large.");
+
+ std::string const instance_name_{};
+ ExecutionConfiguration exec_config_{};
+ std::unique_ptr<BazelCasClient> cas_{};
+ std::unique_ptr<BazelAcClient> ac_{};
+ std::unique_ptr<BazelExecutionClient> exec_{};
+ std::shared_ptr<LocalTreeMap> tree_map_{};
+
+ template <class T_Iter>
+ [[nodiscard]] auto DoUploadBlobs(T_Iter const& first,
+ T_Iter const& last) noexcept -> bool;
+
+ [[nodiscard]] auto ReadObjectInfosRecursively(
+ std::optional<DirectoryMap> const& dir_map,
+ BazelMsgFactory::InfoStoreFunc const& store_info,
+ std::filesystem::path const& parent,
+ bazel_re::Digest const& digest) const noexcept -> bool;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_NETWORK_HPP
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_response.cpp b/src/buildtool/execution_api/remote/bazel/bazel_response.cpp
new file mode 100644
index 00000000..8b75a767
--- /dev/null
+++ b/src/buildtool/execution_api/remote/bazel/bazel_response.cpp
@@ -0,0 +1,125 @@
+#include "src/buildtool/execution_api/remote/bazel/bazel_response.hpp"
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_cas_client.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+auto BazelResponse::ReadStringBlob(bazel_re::Digest const& id) noexcept
+ -> std::string {
+ auto blobs = network_->ReadBlobs({id}).Next();
+ if (blobs.empty()) {
+ // TODO(oreiche): logging
+ return std::string{};
+ }
+ return blobs[0].data;
+}
+
+auto BazelResponse::Artifacts() const noexcept -> ArtifactInfos {
+ ArtifactInfos artifacts{};
+ auto const& action_result = output_.action_result;
+ artifacts.reserve(
+ static_cast<std::size_t>(action_result.output_files().size()));
+
+ // collect files and store them
+ for (auto const& file : action_result.output_files()) {
+ try {
+ artifacts.emplace(file.path(),
+ Artifact::ObjectInfo{
+ ArtifactDigest{file.digest()},
+ file.is_executable() ? ObjectType::Executable
+ : ObjectType::File});
+ } catch (...) {
+ return {};
+ }
+ }
+
+ // obtain tree digests for output directories
+ std::vector<bazel_re::Digest> tree_digests{};
+ tree_digests.reserve(
+ gsl::narrow<std::size_t>(action_result.output_directories_size()));
+ std::transform(action_result.output_directories().begin(),
+ action_result.output_directories().end(),
+ std::back_inserter(tree_digests),
+ [](auto dir) { return dir.tree_digest(); });
+
+ // collect root digests from trees and store them
+ auto blob_reader = network_->ReadBlobs(tree_digests);
+ auto tree_blobs = blob_reader.Next();
+ std::size_t pos{};
+ while (not tree_blobs.empty()) {
+ for (auto const& tree_blob : tree_blobs) {
+ try {
+ auto tree = BazelMsgFactory::MessageFromString<bazel_re::Tree>(
+ tree_blob.data);
+ if (not tree) {
+ return {};
+ }
+
+ // The server does not store the Directory messages it just has
+ // sent us as part of the Tree message. If we want to be able to
+ // use the Directories as inputs for actions, we have to upload
+ // them manually.
+ auto root_digest = UploadTreeMessageDirectories(*tree);
+ if (not root_digest) {
+ return {};
+ }
+ artifacts.emplace(
+ action_result.output_directories(pos).path(),
+ Artifact::ObjectInfo{*root_digest, ObjectType::Tree});
+ } catch (...) {
+ return {};
+ }
+ ++pos;
+ }
+ tree_blobs = blob_reader.Next();
+ }
+ return artifacts;
+}
+
+auto BazelResponse::UploadTreeMessageDirectories(
+ bazel_re::Tree const& tree) const -> std::optional<ArtifactDigest> {
+ BlobContainer dir_blobs{};
+
+ auto rootdir_blob = ProcessDirectoryMessage(tree.root());
+ if (not rootdir_blob) {
+ return std::nullopt;
+ }
+ auto root_digest = rootdir_blob->digest;
+ dir_blobs.Emplace(std::move(*rootdir_blob));
+
+ for (auto const& subdir : tree.children()) {
+ auto subdir_blob = ProcessDirectoryMessage(subdir);
+ if (not subdir_blob) {
+ return std::nullopt;
+ }
+ dir_blobs.Emplace(std::move(*subdir_blob));
+ }
+
+ if (not network_->UploadBlobs(dir_blobs)) {
+ Logger::Log(LogLevel::Error,
+ "uploading Tree's Directory messages failed");
+ return std::nullopt;
+ }
+ return ArtifactDigest{root_digest};
+}
+
+auto BazelResponse::ProcessDirectoryMessage(
+ bazel_re::Directory const& dir) const noexcept -> std::optional<BazelBlob> {
+ auto data = dir.SerializeAsString();
+ auto digest = ArtifactDigest::Create(data);
+
+ if (tree_map_ and not tree_map_->HasTree(digest)) {
+ // cache in local tree map
+ auto tree = tree_map_->CreateTree();
+ if (not BazelMsgFactory::ReadObjectInfosFromDirectory(
+ dir,
+ [&tree](auto path, auto info) {
+ return tree.AddInfo(path, info);
+ }) or
+ not tree_map_->AddTree(digest, std::move(tree))) {
+ return std::nullopt;
+ }
+ }
+
+ return BazelBlob{std::move(digest), std::move(data)};
+}
diff --git a/src/buildtool/execution_api/remote/bazel/bazel_response.hpp b/src/buildtool/execution_api/remote/bazel/bazel_response.hpp
new file mode 100644
index 00000000..778efa0a
--- /dev/null
+++ b/src/buildtool/execution_api/remote/bazel/bazel_response.hpp
@@ -0,0 +1,77 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_RESPONSE_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_RESPONSE_HPP
+
+#include <string>
+#include <vector>
+
+#include "src/buildtool/execution_api/common/execution_api.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_network.hpp"
+
+class BazelAction;
+
+/// \brief Bazel implementation of the abstract Execution Response.
+/// Access Bazel execution output data and obtain a Bazel Artifact.
+class BazelResponse final : public IExecutionResponse {
+ friend class BazelAction;
+
+ public:
+ auto Status() const noexcept -> StatusCode final {
+ return output_.status.ok() ? StatusCode::Success : StatusCode::Failed;
+ }
+ auto HasStdErr() const noexcept -> bool final {
+ return IsDigestNotEmpty(output_.action_result.stderr_digest());
+ }
+ auto HasStdOut() const noexcept -> bool final {
+ return IsDigestNotEmpty(output_.action_result.stdout_digest());
+ }
+ auto StdErr() noexcept -> std::string final {
+ return ReadStringBlob(output_.action_result.stderr_digest());
+ }
+ auto StdOut() noexcept -> std::string final {
+ return ReadStringBlob(output_.action_result.stdout_digest());
+ }
+ auto ExitCode() const noexcept -> int final {
+ return output_.action_result.exit_code();
+ }
+ auto IsCached() const noexcept -> bool final {
+ return output_.cached_result;
+ };
+
+ auto ActionDigest() const noexcept -> std::string final {
+ return action_id_;
+ }
+
+ auto Artifacts() const noexcept -> ArtifactInfos final;
+
+ private:
+ std::string action_id_{};
+ std::shared_ptr<BazelNetwork> const network_{};
+ std::shared_ptr<LocalTreeMap> const tree_map_{};
+ BazelExecutionClient::ExecutionOutput output_{};
+
+ BazelResponse(std::string action_id,
+ std::shared_ptr<BazelNetwork> network,
+ std::shared_ptr<LocalTreeMap> tree_map,
+ BazelExecutionClient::ExecutionOutput output)
+ : action_id_{std::move(action_id)},
+ network_{std::move(network)},
+ tree_map_{std::move(tree_map)},
+ output_{std::move(output)} {}
+
+ [[nodiscard]] auto ReadStringBlob(bazel_re::Digest const& id) noexcept
+ -> std::string;
+
+ [[nodiscard]] static auto IsDigestNotEmpty(bazel_re::Digest const& id)
+ -> bool {
+ return id.size_bytes() != 0;
+ }
+
+ [[nodiscard]] auto UploadTreeMessageDirectories(
+ bazel_re::Tree const& tree) const -> std::optional<ArtifactDigest>;
+
+ [[nodiscard]] auto ProcessDirectoryMessage(bazel_re::Directory const& dir)
+ const noexcept -> std::optional<BazelBlob>;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BAZEL_RESPONSE_HPP
diff --git a/src/buildtool/execution_api/remote/bazel/bytestream_client.hpp b/src/buildtool/execution_api/remote/bazel/bytestream_client.hpp
new file mode 100644
index 00000000..b8823236
--- /dev/null
+++ b/src/buildtool/execution_api/remote/bazel/bytestream_client.hpp
@@ -0,0 +1,185 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BYTESTREAM_CLIENT_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BYTESTREAM_CLIENT_HPP
+
+#include <functional>
+#include <iomanip>
+#include <optional>
+#include <string>
+#include <vector>
+
+#include "google/bytestream/bytestream.grpc.pb.h"
+#include "src/buildtool/execution_api/remote/bazel/bazel_client_common.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+/// Implements client side for google.bytestream.ByteStream service.
+class ByteStreamClient {
+ public:
+ class IncrementalReader {
+ friend class ByteStreamClient;
+
+ public:
+ /// \brief Read next chunk of data.
+ /// \returns empty string if stream finished and std::nullopt on error.
+ [[nodiscard]] auto Next() -> std::optional<std::string> {
+ google::bytestream::ReadResponse response{};
+ if (reader_->Read(&response)) {
+ return std::move(*response.mutable_data());
+ }
+
+ if (not finished_) {
+ auto status = reader_->Finish();
+ if (not status.ok()) {
+ LogStatus(logger_, LogLevel::Debug, status);
+ return std::nullopt;
+ }
+ finished_ = true;
+ }
+ return std::string{};
+ }
+
+ private:
+ Logger const* logger_;
+ grpc::ClientContext ctx_;
+ std::unique_ptr<grpc::ClientReader<google::bytestream::ReadResponse>>
+ reader_;
+ bool finished_{false};
+
+ IncrementalReader(
+ gsl::not_null<google::bytestream::ByteStream::Stub*> const& stub,
+ Logger const* logger,
+ std::string const& resource_name)
+ : logger_{logger} {
+ google::bytestream::ReadRequest request{};
+ request.set_resource_name(resource_name);
+ reader_ = stub->Read(&ctx_, request);
+ }
+ };
+
+ ByteStreamClient(std::string const& server,
+ Port port,
+ std::string const& user = "",
+ std::string const& pwd = "") noexcept {
+ stub_ = google::bytestream::ByteStream::NewStub(
+ CreateChannelWithCredentials(server, port, user, pwd));
+ }
+
+ [[nodiscard]] auto IncrementalRead(
+ std::string const& resource_name) const noexcept -> IncrementalReader {
+ return IncrementalReader{stub_.get(), &logger_, resource_name};
+ }
+
+ [[nodiscard]] auto Read(std::string const& resource_name) const noexcept
+ -> std::optional<std::string> {
+ auto reader = IncrementalRead(resource_name);
+ std::string output{};
+ auto data = reader.Next();
+ while (data and not data->empty()) {
+ output.append(data->begin(), data->end());
+ data = reader.Next();
+ }
+ if (not data) {
+ return std::nullopt;
+ }
+ return output;
+ }
+
+ [[nodiscard]] auto Write(std::string const& resource_name,
+ std::string const& data) const noexcept -> bool {
+ grpc::ClientContext ctx;
+ google::bytestream::WriteResponse response{};
+ auto writer = stub_->Write(&ctx, &response);
+
+ auto* allocated_data =
+ std::make_unique<std::string>(kChunkSize, '\0').release();
+ google::bytestream::WriteRequest request{};
+ request.set_resource_name(resource_name);
+ request.set_allocated_data(allocated_data);
+ std::size_t pos{};
+ do {
+ auto const size = std::min(data.size() - pos, kChunkSize);
+ allocated_data->resize(size);
+ data.copy(allocated_data->data(), size, pos);
+ request.set_write_offset(static_cast<int>(pos));
+ request.set_finish_write(pos + size >= data.size());
+ if (not writer->Write(request)) {
+ // According to the docs, quote:
+ // If there is an error or the connection is broken during the
+ // `Write()`, the client should check the status of the
+ // `Write()` by calling `QueryWriteStatus()` and continue
+ // writing from the returned `committed_size`.
+ auto const committed_size = QueryWriteStatus(resource_name);
+ if (committed_size <= 0) {
+ logger_.Emit(LogLevel::Debug,
+ "broken stream for upload to resource name {}",
+ resource_name);
+ return false;
+ }
+ pos = gsl::narrow<std::size_t>(committed_size);
+ }
+ else {
+ pos += kChunkSize;
+ }
+ } while (pos < data.size());
+ if (not writer->WritesDone()) {
+ logger_.Emit(LogLevel::Debug,
+ "broken stream for upload to resource name {}",
+ resource_name);
+ return false;
+ }
+
+ auto status = writer->Finish();
+ if (not status.ok()) {
+ LogStatus(&logger_, LogLevel::Debug, status);
+ return false;
+ }
+
+ return gsl::narrow<std::size_t>(response.committed_size()) ==
+ data.size();
+ }
+
+ template <class T_Input>
+ void ReadMany(
+ std::vector<T_Input> const& inputs,
+ std::function<std::string(T_Input const&)> const& to_resource_name,
+ std::function<void(std::string)> const& parse_data) const noexcept {
+ for (auto const& i : inputs) {
+ auto data = Read(to_resource_name(i));
+ if (data) {
+ parse_data(std::move(*data));
+ }
+ }
+ }
+
+ template <class T_Input>
+ [[nodiscard]] auto WriteMany(
+ std::vector<T_Input> const& inputs,
+ std::function<std::string(T_Input const&)> const& to_resource_name,
+ std::function<std::string(T_Input const&)> const& to_data)
+ const noexcept -> bool {
+ for (auto const& i : inputs) {
+ if (not Write(to_resource_name(i), to_data(i))) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private:
+ // Chunk size for uploads (default size used by BuildBarn)
+ constexpr static std::size_t kChunkSize = 64 * 1024;
+
+ std::unique_ptr<google::bytestream::ByteStream::Stub> stub_;
+ Logger logger_{"ByteStreamClient"};
+
+ [[nodiscard]] auto QueryWriteStatus(
+ std::string const& resource_name) const noexcept -> std::int64_t {
+ grpc::ClientContext ctx;
+ google::bytestream::QueryWriteStatusRequest request{};
+ request.set_resource_name(resource_name);
+ google::bytestream::QueryWriteStatusResponse response{};
+ stub_->QueryWriteStatus(&ctx, request, &response);
+ return response.committed_size();
+ }
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_BAZEL_BYTESTREAM_CLIENT_HPP
diff --git a/src/buildtool/execution_api/remote/config.hpp b/src/buildtool/execution_api/remote/config.hpp
new file mode 100644
index 00000000..e29b8aa7
--- /dev/null
+++ b/src/buildtool/execution_api/remote/config.hpp
@@ -0,0 +1,72 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_CONFIG_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_CONFIG_HPP
+
+#include <map>
+#include <memory>
+#include <optional>
+#include <sstream>
+#include <stdexcept>
+#include <string>
+#include <unordered_map>
+#include <utility>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+class RemoteExecutionConfig {
+ public:
+ [[nodiscard]] static auto ParseAddress(std::string const& address) noexcept
+ -> std::optional<std::pair<std::string, int>> {
+ std::istringstream iss(address);
+ std::string host;
+ std::string port;
+ if (not std::getline(iss, host, ':') or
+ not std::getline(iss, port, ':')) {
+ return std::nullopt;
+ }
+ try {
+ return std::make_pair(host, std::stoi(port));
+ } catch (std::out_of_range const& e) {
+ Logger::Log(LogLevel::Error, "Port raised out_of_range exception.");
+ return std::nullopt;
+ } catch (std::invalid_argument const& e) {
+ Logger::Log(LogLevel::Error,
+ "Port raised invalid_argument exception.");
+ return std::nullopt;
+ }
+ }
+
+ // Obtain global instance
+ [[nodiscard]] static auto Instance() noexcept -> RemoteExecutionConfig& {
+ static RemoteExecutionConfig config;
+ return config;
+ }
+
+ [[nodiscard]] auto IsValidAddress() const noexcept -> bool {
+ return valid_;
+ }
+
+ [[nodiscard]] auto SetAddress(std::string const& address) noexcept -> bool {
+ auto pair = ParseAddress(address);
+ return pair and SetAddress(pair->first, pair->second);
+ }
+
+ [[nodiscard]] auto SetAddress(std::string const& host, int port) noexcept
+ -> bool {
+ host_ = host;
+ port_ = port,
+ valid_ = (not host.empty() and port >= 0 and port <= kMaxPortNumber);
+ return valid_;
+ }
+
+ [[nodiscard]] auto Host() const noexcept -> std::string { return host_; }
+ [[nodiscard]] auto Port() const noexcept -> int { return port_; }
+
+ private:
+ static constexpr int kMaxPortNumber{std::numeric_limits<uint16_t>::max()};
+ std::string host_{};
+ int port_{};
+ bool valid_{false};
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_API_REMOTE_CONFIG_HPP
diff --git a/src/buildtool/execution_engine/TARGETS b/src/buildtool/execution_engine/TARGETS
new file mode 100644
index 00000000..9e26dfee
--- /dev/null
+++ b/src/buildtool/execution_engine/TARGETS
@@ -0,0 +1 @@
+{} \ No newline at end of file
diff --git a/src/buildtool/execution_engine/dag/TARGETS b/src/buildtool/execution_engine/dag/TARGETS
new file mode 100644
index 00000000..3c46f75f
--- /dev/null
+++ b/src/buildtool/execution_engine/dag/TARGETS
@@ -0,0 +1,17 @@
+{ "dag":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["dag"]
+ , "hdrs": ["dag.hpp"]
+ , "srcs": ["dag.cpp"]
+ , "deps":
+ [ ["src/utils/cpp", "type_safe_arithmetic"]
+ , ["src/buildtool/common", "common"]
+ , ["src/buildtool/common", "action_description"]
+ , ["src/buildtool/common", "artifact_description"]
+ , ["src/buildtool/file_system", "object_type"]
+ , ["src/buildtool/logging", "logging"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ ]
+ , "stage": ["src", "buildtool", "execution_engine", "dag"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/execution_engine/dag/dag.cpp b/src/buildtool/execution_engine/dag/dag.cpp
new file mode 100644
index 00000000..96a74650
--- /dev/null
+++ b/src/buildtool/execution_engine/dag/dag.cpp
@@ -0,0 +1,263 @@
+#include "src/buildtool/execution_engine/dag/dag.hpp"
+
+#include "src/buildtool/common/artifact_description.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+auto DependencyGraph::CreateOutputArtifactNodes(
+ std::string const& action_id,
+ std::vector<std::string> const& file_paths,
+ std::vector<std::string> const& dir_paths,
+ bool is_tree_action)
+ -> std::pair<std::vector<DependencyGraph::NamedArtifactNodePtr>,
+ std::vector<DependencyGraph::NamedArtifactNodePtr>> {
+ if (is_tree_action) { // create tree artifact
+ auto artifact = ArtifactDescription{action_id}.ToArtifact();
+ auto const node_id = AddArtifact(std::move(artifact));
+ return std::make_pair(std::vector<NamedArtifactNodePtr>{},
+ std::vector<NamedArtifactNodePtr>{
+ {{}, &(*artifact_nodes_[node_id])}});
+ }
+
+ // create action artifacts
+ auto node_creator = [this, &action_id](auto* nodes, auto const& paths) {
+ for (auto const& artifact_path : paths) {
+ auto artifact =
+ ArtifactDescription{action_id,
+ std::filesystem::path{artifact_path}}
+ .ToArtifact();
+ auto const node_id = AddArtifact(std::move(artifact));
+ nodes->emplace_back(NamedArtifactNodePtr{
+ artifact_path, &(*artifact_nodes_[node_id])});
+ }
+ };
+
+ std::vector<NamedArtifactNodePtr> file_nodes{};
+ file_nodes.reserve(file_paths.size());
+ node_creator(&file_nodes, file_paths);
+
+ std::vector<NamedArtifactNodePtr> dir_nodes{};
+ dir_nodes.reserve(dir_paths.size());
+ node_creator(&dir_nodes, dir_paths);
+
+ return std::make_pair(std::move(file_nodes), std::move(dir_nodes));
+}
+
+auto DependencyGraph::CreateInputArtifactNodes(
+ ActionDescription::inputs_t const& inputs)
+ -> std::optional<std::vector<DependencyGraph::NamedArtifactNodePtr>> {
+ std::vector<NamedArtifactNodePtr> nodes{};
+
+ for (auto const& [local_path, artifact_desc] : inputs) {
+ auto artifact = artifact_desc.ToArtifact();
+ auto const node_id = AddArtifact(std::move(artifact));
+ nodes.push_back({local_path, &(*artifact_nodes_[node_id])});
+ }
+ return nodes;
+}
+
+auto DependencyGraph::CreateActionNode(Action const& action) noexcept
+ -> DependencyGraph::ActionNode* {
+ if (action.IsTreeAction() or not action.Command().empty()) {
+ auto const node_id = AddAction(action);
+ return &(*action_nodes_[node_id]);
+ }
+ return nullptr;
+}
+
+auto DependencyGraph::LinkNodePointers(
+ std::vector<NamedArtifactNodePtr> const& output_files,
+ std::vector<NamedArtifactNodePtr> const& output_dirs,
+ gsl::not_null<ActionNode*> const& action_node,
+ std::vector<NamedArtifactNodePtr> const& input_nodes) noexcept -> bool {
+ for (auto const& named_file : output_files) {
+ if (!named_file.node->AddBuilderActionNode(action_node) ||
+ !action_node->AddOutputFile(named_file)) {
+ return false;
+ }
+ }
+ for (auto const& named_dir : output_dirs) {
+ if (!named_dir.node->AddBuilderActionNode(action_node) ||
+ !action_node->AddOutputDir(named_dir)) {
+ return false;
+ }
+ }
+
+ for (auto const& named_node : input_nodes) {
+ if (!named_node.node->AddConsumerActionNode(action_node) ||
+ !action_node->AddDependency(named_node)) {
+ return false;
+ }
+ }
+
+ action_node->NotifyDoneLinking();
+
+ return true;
+}
+
+auto DependencyGraph::Add(std::vector<ActionDescription> const& actions)
+ -> bool {
+ for (auto const& action : actions) {
+ if (not AddAction(action)) {
+ return false;
+ }
+ }
+ return true;
+}
+
+auto DependencyGraph::AddArtifact(ArtifactDescription const& description)
+ -> ArtifactIdentifier {
+ auto artifact = description.ToArtifact();
+ auto id = artifact.Id();
+ [[maybe_unused]] auto const node_id = AddArtifact(std::move(artifact));
+ return id;
+}
+
+auto DependencyGraph::AddAction(ActionDescription const& description) -> bool {
+ auto output_nodes =
+ CreateOutputArtifactNodes(description.Id(),
+ description.OutputFiles(),
+ description.OutputDirs(),
+ description.GraphAction().IsTreeAction());
+ auto* action_node = CreateActionNode(description.GraphAction());
+ auto input_nodes = CreateInputArtifactNodes(description.Inputs());
+
+ if (action_node == nullptr or not input_nodes.has_value() or
+ (output_nodes.first.empty() and output_nodes.second.empty())) {
+ return false;
+ }
+
+ return LinkNodePointers(
+ output_nodes.first, output_nodes.second, action_node, *input_nodes);
+}
+
+auto DependencyGraph::AddAction(Action const& a) noexcept
+ -> DependencyGraph::ActionNodeIdentifier {
+ auto id = a.Id();
+ auto const action_it = action_ids_.find(id);
+ if (action_it != action_ids_.end()) {
+ return action_it->second;
+ }
+ action_nodes_.emplace_back(ActionNode::Create(a));
+ ActionNodeIdentifier node_id{action_nodes_.size() - 1};
+ action_ids_[id] = node_id;
+ return node_id;
+}
+
+auto DependencyGraph::AddAction(Action&& a) noexcept
+ -> DependencyGraph::ActionNodeIdentifier {
+ auto const& id = a.Id();
+ auto const action_it = action_ids_.find(id);
+ if (action_it != action_ids_.end()) {
+ return action_it->second;
+ }
+ action_nodes_.emplace_back(ActionNode::Create(std::move(a)));
+ ActionNodeIdentifier node_id{action_nodes_.size() - 1};
+ action_ids_[id] = node_id;
+ return node_id;
+}
+
+auto DependencyGraph::AddArtifact(Artifact const& a) noexcept
+ -> DependencyGraph::ArtifactNodeIdentifier {
+ auto const& id = a.Id();
+ auto const artifact_it = artifact_ids_.find(id);
+ if (artifact_it != artifact_ids_.end()) {
+ return artifact_it->second;
+ }
+ artifact_nodes_.emplace_back(ArtifactNode::Create(a));
+ ArtifactNodeIdentifier node_id{artifact_nodes_.size() - 1};
+ artifact_ids_[id] = node_id;
+ return node_id;
+}
+
+auto DependencyGraph::AddArtifact(Artifact&& a) noexcept
+ -> DependencyGraph::ArtifactNodeIdentifier {
+ auto id = a.Id();
+ auto const artifact_it = artifact_ids_.find(id);
+ if (artifact_it != artifact_ids_.end()) {
+ return artifact_it->second;
+ }
+ artifact_nodes_.emplace_back(ArtifactNode::Create(std::move(a)));
+ ArtifactNodeIdentifier node_id{artifact_nodes_.size() - 1};
+ artifact_ids_[id] = node_id;
+ return node_id;
+}
+
+auto DependencyGraph::ArtifactIdentifiers() const noexcept
+ -> std::unordered_set<ArtifactIdentifier> {
+ std::unordered_set<ArtifactIdentifier> ids;
+ std::transform(
+ std::begin(artifact_ids_),
+ std::end(artifact_ids_),
+ std::inserter(ids, std::begin(ids)),
+ [](auto const& artifact_id_pair) { return artifact_id_pair.first; });
+ return ids;
+}
+
+auto DependencyGraph::ArtifactNodeWithId(ArtifactIdentifier const& id)
+ const noexcept -> DependencyGraph::ArtifactNode const* {
+ auto it_to_artifact = artifact_ids_.find(id);
+ if (it_to_artifact == artifact_ids_.end()) {
+ return nullptr;
+ }
+ return &(*artifact_nodes_[it_to_artifact->second]);
+}
+
+auto DependencyGraph::ActionNodeWithId(ActionIdentifier const& id)
+ const noexcept -> DependencyGraph::ActionNode const* {
+ auto it_to_action = action_ids_.find(id);
+ if (it_to_action == action_ids_.end()) {
+ return nullptr;
+ }
+ return &(*action_nodes_[it_to_action->second]);
+}
+
+auto DependencyGraph::ActionNodeOfArtifactWithId(ArtifactIdentifier const& id)
+ const noexcept -> DependencyGraph::ActionNode const* {
+ auto const* node = ArtifactNodeWithId(id);
+ if (node != nullptr) {
+ auto const& children = node->Children();
+ if (children.empty()) {
+ return nullptr;
+ }
+ return children[0];
+ }
+ return nullptr;
+}
+
+auto DependencyGraph::ArtifactWithId(
+ ArtifactIdentifier const& id) const noexcept -> std::optional<Artifact> {
+ auto const* node = ArtifactNodeWithId(id);
+ if (node != nullptr) {
+ return node->Content();
+ }
+ return std::nullopt;
+}
+
+[[nodiscard]] auto DependencyGraph::ActionWithId(
+ ActionIdentifier const& id) const noexcept -> std::optional<Action> {
+ auto const* node = ActionNodeWithId(id);
+ if (node != nullptr) {
+ return node->Content();
+ }
+ return std::nullopt;
+}
+
+auto DependencyGraph::ActionOfArtifactWithId(
+ ArtifactIdentifier const& artifact_id) const noexcept
+ -> std::optional<Action> {
+ auto const* node = ActionNodeOfArtifactWithId(artifact_id);
+ if (node != nullptr) {
+ return node->Content();
+ }
+ return std::nullopt;
+}
+
+auto DependencyGraph::ActionIdOfArtifactWithId(
+ ArtifactIdentifier const& artifact_id) const noexcept
+ -> std::optional<ActionIdentifier> {
+ auto const& action = ActionOfArtifactWithId(artifact_id);
+ if (action) {
+ return action->Id();
+ }
+ return std::nullopt;
+}
diff --git a/src/buildtool/execution_engine/dag/dag.hpp b/src/buildtool/execution_engine/dag/dag.hpp
new file mode 100644
index 00000000..4f29d79a
--- /dev/null
+++ b/src/buildtool/execution_engine/dag/dag.hpp
@@ -0,0 +1,613 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_ENGINE_DAG_DAG_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_ENGINE_DAG_DAG_HPP
+
+#include <algorithm>
+#include <atomic>
+#include <cstdint>
+#include <map>
+#include <memory>
+#include <optional>
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+#include <variant>
+#include <vector>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/common/action.hpp"
+#include "src/buildtool/common/action_description.hpp"
+#include "src/buildtool/common/artifact.hpp"
+#include "src/buildtool/common/artifact_description.hpp"
+#include "src/buildtool/file_system/object_type.hpp"
+#include "src/utils/cpp/type_safe_arithmetic.hpp"
+
+/// \brief Plain DirectedAcyclicGraph.
+/// Additional properties (e.g. bipartiteness) can be encoded in nodes.
+/// Deliberately not using \ref DirectedAcyclicGraph::Node anywhere to avoid
+/// vtable lookups. For now, it does not hold any data.
+class DirectedAcyclicGraph {
+ public:
+ /// \brief Abstract class for DAG nodes.
+ /// \tparam T_Content Type of content.
+ /// \tparam T_Other Type of neighboring nodes.
+ /// Sub classes need to implement \ref IsValid method.
+ /// TODO: once we have hashes, require sub classes to implement generating
+ /// IDs depending on its unique content.
+ template <typename T_Content, typename T_Other>
+ class Node {
+ public:
+ using Id = std::uintptr_t;
+ using OtherNode = T_Other;
+ using OtherNodePtr = gsl::not_null<OtherNode*>;
+
+ explicit Node(T_Content&& content) noexcept
+ : content_{std::move(content)} {}
+
+ // NOLINTNEXTLINE(modernize-pass-by-value)
+ explicit Node(T_Content const& content) noexcept : content_{content} {}
+
+ Node(T_Content const& content,
+ std::vector<OtherNodePtr> const& parents,
+ std::vector<OtherNodePtr> const& children) noexcept
+ : content_{content}, parents_{parents}, children_{children} {}
+
+ // No copies and no moves are allowed. The only way to "copy" an
+ // instance is to copy its raw pointer.
+ Node(Node const&) = delete;
+ Node(Node&&) = delete;
+ auto operator=(Node const&) -> Node& = delete;
+ auto operator=(Node &&) -> Node& = delete;
+
+ [[nodiscard]] auto NodeId() const noexcept -> Id {
+ // NOLINTNEXTLINE(cppcoreguidelines-pro-type-reinterpret-cast)
+ return reinterpret_cast<Id>(this);
+ }
+
+ [[nodiscard]] auto Content() const& noexcept -> T_Content const& {
+ return content_;
+ }
+
+ [[nodiscard]] auto Content() && noexcept -> T_Content {
+ return std::move(content_);
+ }
+
+ [[nodiscard]] auto Parents() const& noexcept
+ -> std::vector<OtherNodePtr> const& {
+ return parents_;
+ }
+
+ [[nodiscard]] auto Children() const& noexcept
+ -> std::vector<OtherNodePtr> const& {
+ return children_;
+ }
+
+ auto AddParent(OtherNodePtr const& parent) noexcept -> bool {
+ parents_.push_back(parent);
+ return true;
+ }
+
+ auto AddParent(OtherNodePtr&& parent) noexcept -> bool {
+ parents_.push_back(std::move(parent));
+ return true;
+ }
+
+ auto AddChild(OtherNodePtr const& child) noexcept -> bool {
+ children_.push_back(child);
+ return true;
+ }
+
+ auto AddChild(OtherNodePtr&& child) noexcept -> bool {
+ children_.push_back(std::move(child));
+ return true;
+ }
+
+ [[nodiscard]] virtual auto IsValid() const noexcept -> bool = 0;
+ virtual ~Node() noexcept = default;
+
+ private:
+ T_Content content_{};
+ std::vector<OtherNodePtr> parents_{};
+ std::vector<OtherNodePtr> children_{};
+ };
+
+ /// \brief Lock-free class for basic traversal state data
+ /// Provides the following atomic operations:
+ /// - Retrieve (previous) state and mark as discovered, which will allow us
+ /// to know whether we should queue a visit the node or not at the same
+ /// time that we mark that its visit should not be queued by other threads,
+ /// since it is being queued by the current caller to this method or it has
+ /// already been queued by a previous caller.
+ /// Note that "discovered" refers to "queued for visit" here.
+ /// - Retrieve (previous) state and mark as queued to be processed, which
+ /// will allow us to ensure that processing a node is queued at most once.
+ class NodeTraversalState {
+ public:
+ NodeTraversalState() noexcept = default;
+ NodeTraversalState(NodeTraversalState const&) = delete;
+ NodeTraversalState(NodeTraversalState&&) = delete;
+ auto operator=(NodeTraversalState const&)
+ -> NodeTraversalState& = delete;
+ auto operator=(NodeTraversalState &&) -> NodeTraversalState& = delete;
+ ~NodeTraversalState() noexcept = default;
+
+ /// \brief Sets traversal state as discovered
+ /// \returns True if it was already discovered, false otherwise
+ /// Note: this is an atomic, lock-free operation
+ [[nodiscard]] auto GetAndMarkDiscovered() noexcept -> bool {
+ return std::atomic_exchange(&has_been_discovered_, true);
+ }
+
+ /// \brief Sets traversal state as queued to be processed
+ /// \returns True if it was already queued to be processed, false
+ /// otherwise
+ /// Note: this is an atomic, lock-free operation
+ [[nodiscard]] auto GetAndMarkQueuedToBeProcessed() noexcept -> bool {
+ return std::atomic_exchange(&is_queued_to_be_processed_, true);
+ }
+
+ /// \brief Check if a node is required to be processed or not
+ [[nodiscard]] auto IsRequired() const noexcept -> bool {
+ return is_required_;
+ }
+
+ /// \brief Mark node as required to be executed
+ /// Note: this should be upon node discovery (visit) while traversing
+ /// the graph
+ void MarkRequired() noexcept { is_required_ = true; }
+
+ private:
+ std::atomic<bool> has_been_discovered_{false};
+ std::atomic<bool> is_queued_to_be_processed_{false};
+ std::atomic<bool> is_required_{false};
+ };
+
+ protected:
+ template <typename T_Node>
+ [[nodiscard]] static auto check_validity(
+ gsl::not_null<T_Node*> node) noexcept -> bool {
+ // Check node-specified validity (e.g. bipartiteness requirements)
+ if (!node->IsValid()) {
+ return false;
+ }
+
+ // Search for cycles
+ thread_local std::vector<typename T_Node::Id> stack{};
+ for (auto const& child : node->Children()) {
+ auto node_id = child->NodeId();
+
+ if (std::find(stack.begin(), stack.end(), node_id) != stack.end()) {
+ return false;
+ }
+
+ stack.push_back(node_id);
+ bool valid = check_validity(child);
+ stack.pop_back();
+
+ if (!valid) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+};
+
+class DependencyGraph : DirectedAcyclicGraph {
+ public:
+ // Forward declaration
+ class ArtifactNode;
+
+ // Node identifier for actions
+ struct ActionNodeIdentifierTag : type_safe_arithmetic_tag<std::size_t> {};
+ using ActionNodeIdentifier = type_safe_arithmetic<ActionNodeIdentifierTag>;
+
+ // Node identifier for artifacts
+ struct ArtifactNodeIdentifierTag : type_safe_arithmetic_tag<std::size_t> {};
+ using ArtifactNodeIdentifier =
+ type_safe_arithmetic<ArtifactNodeIdentifierTag>;
+
+ /// \brief Class for traversal state data specific for ActionNode's
+ /// Provides the following atomic operations (listed on the public methods):
+ class ActionNodeTraversalState : public NodeTraversalState {
+ public:
+ ActionNodeTraversalState() noexcept = default;
+ ActionNodeTraversalState(ActionNodeTraversalState const&) = delete;
+ ActionNodeTraversalState(ActionNodeTraversalState&&) = delete;
+ auto operator=(ActionNodeTraversalState const&)
+ -> ActionNodeTraversalState& = delete;
+ auto operator=(ActionNodeTraversalState &&)
+ -> ActionNodeTraversalState& = delete;
+ ~ActionNodeTraversalState() noexcept = default;
+
+ /// \brief Acknowledge that a dependency was made available and return
+ /// whether the action is ready to be executed
+ [[nodiscard]] auto NotifyAvailableDepAndCheckReady() noexcept -> bool {
+ return std::atomic_fetch_sub(&unavailable_deps_, 1) == 1;
+ }
+
+ /// \brief Check whether the action can be now executed or not.
+ /// Note: checking state without modifying (unlike
+ /// NotifyAvailableDepAndCheckReady()) is useful in the case that when
+ /// the action node is visited all its dependencies were already
+ /// available
+ [[nodiscard]] auto IsReady() const noexcept -> bool {
+ return unavailable_deps_ == 0;
+ }
+
+ /// \brief Initialise number of unavailable dependencies
+ /// \param[in] count Number of unavailable dependencies
+ /// Note: this method should be called previous to the start of the
+ /// traversal (once the action node is built)
+ void InitUnavailableDeps(std::size_t count) noexcept {
+ unavailable_deps_ = static_cast<int>(count);
+ }
+
+ private:
+ std::atomic<int> unavailable_deps_{-1};
+ };
+
+ /// \brief Class for traversal state data specific for ArtifactNode's
+ /// Provides the following atomic operations:
+ /// - Mark the artifact in this node as available
+ /// - Check whether the artifact in this node is available or not
+ class ArtifactNodeTraversalState : public NodeTraversalState {
+ public:
+ ArtifactNodeTraversalState() noexcept = default;
+ ArtifactNodeTraversalState(ArtifactNodeTraversalState const&) = delete;
+ ArtifactNodeTraversalState(ArtifactNodeTraversalState&&) = delete;
+ auto operator=(ArtifactNodeTraversalState const&)
+ -> ArtifactNodeTraversalState& = delete;
+ auto operator=(ArtifactNodeTraversalState &&)
+ -> ArtifactNodeTraversalState& = delete;
+ ~ArtifactNodeTraversalState() noexcept = default;
+
+ [[nodiscard]] auto IsAvailable() const noexcept -> bool {
+ return is_available_;
+ }
+
+ void MakeAvailable() noexcept { is_available_ = true; }
+
+ private:
+ std::atomic<bool> is_available_{false};
+ };
+
+ /// \brief Action node (bipartite)
+ /// Cannot be entry (see \ref IsValid).
+ class ActionNode final : public Node<Action, ArtifactNode> {
+ using base = Node<Action, ArtifactNode>;
+
+ public:
+ using base::base;
+ using Ptr = gsl::not_null<std::unique_ptr<ActionNode>>;
+ struct NamedOtherNodePtr {
+ Action::LocalPath path;
+ base::OtherNodePtr node;
+ };
+
+ [[nodiscard]] static auto Create(Action const& content) noexcept
+ -> Ptr {
+ return std::make_unique<ActionNode>(content);
+ }
+
+ [[nodiscard]] static auto Create(Action&& content) noexcept -> Ptr {
+ return std::make_unique<ActionNode>(std::move(content));
+ }
+
+ // only valid if it has parents
+ [[nodiscard]] auto IsValid() const noexcept -> bool final {
+ return (!base::Parents().empty());
+ }
+
+ [[nodiscard]] auto AddOutputFile(
+ NamedOtherNodePtr const& output) noexcept -> bool {
+ base::AddParent(output.node);
+ output_files_.push_back(output);
+ return true;
+ }
+
+ [[nodiscard]] auto AddOutputFile(NamedOtherNodePtr&& output) noexcept
+ -> bool {
+ base::AddParent(output.node);
+ output_files_.push_back(std::move(output));
+ return true;
+ }
+
+ [[nodiscard]] auto AddOutputDir(
+ NamedOtherNodePtr const& output) noexcept -> bool {
+ base::AddParent(output.node);
+ output_dirs_.push_back(output);
+ return true;
+ }
+
+ [[nodiscard]] auto AddOutputDir(NamedOtherNodePtr&& output) noexcept
+ -> bool {
+ base::AddParent(output.node);
+ output_dirs_.push_back(std::move(output));
+ return true;
+ }
+
+ [[nodiscard]] auto AddDependency(
+ NamedOtherNodePtr const& dependency) noexcept -> bool {
+ base::AddChild(dependency.node);
+ dependencies_.push_back(dependency);
+ return true;
+ }
+
+ [[nodiscard]] auto AddDependency(
+ NamedOtherNodePtr&& dependency) noexcept -> bool {
+ base::AddChild(dependency.node);
+ dependencies_.push_back(std::move(dependency));
+ return true;
+ }
+
+ [[nodiscard]] auto OutputFiles()
+ const& -> std::vector<NamedOtherNodePtr> const& {
+ return output_files_;
+ }
+
+ [[nodiscard]] auto OutputDirs()
+ const& -> std::vector<NamedOtherNodePtr> const& {
+ return output_dirs_;
+ }
+
+ [[nodiscard]] auto Command() const -> std::vector<std::string> {
+ return Content().Command();
+ }
+
+ [[nodiscard]] auto Env() const -> std::map<std::string, std::string> {
+ return Content().Env();
+ }
+
+ [[nodiscard]] auto MayFail() const -> std::optional<std::string> {
+ return Content().MayFail();
+ }
+
+ [[nodiscard]] auto NoCache() const -> bool {
+ return Content().NoCache();
+ }
+
+ [[nodiscard]] auto Dependencies()
+ const& -> std::vector<NamedOtherNodePtr> const& {
+ return dependencies_;
+ }
+
+ [[nodiscard]] auto OutputFilePaths() const
+ -> std::vector<Action::LocalPath> {
+ return NodePaths(output_files_);
+ }
+
+ [[nodiscard]] auto OutputFileIds() const
+ -> std::vector<ArtifactIdentifier> {
+ return Ids(output_files_);
+ }
+
+ [[nodiscard]] auto OutputDirPaths() const
+ -> std::vector<Action::LocalPath> {
+ return NodePaths(output_dirs_);
+ }
+
+ [[nodiscard]] auto OutputDirIds() const
+ -> std::vector<ArtifactIdentifier> {
+ return Ids(output_dirs_);
+ }
+
+ [[nodiscard]] auto DependencyPaths() const
+ -> std::vector<Action::LocalPath> {
+ return NodePaths(dependencies_);
+ }
+
+ [[nodiscard]] auto DependencyIds() const
+ -> std::vector<ArtifactIdentifier> {
+ return Ids(dependencies_);
+ }
+
+ // To initialise the action traversal specific data before traversing
+ // the graph
+ void NotifyDoneLinking() const noexcept {
+ traversal_state_->InitUnavailableDeps(Children().size());
+ }
+
+ [[nodiscard]] auto TraversalState() const noexcept
+ -> ActionNodeTraversalState* {
+ return traversal_state_.get();
+ }
+
+ private:
+ std::vector<NamedOtherNodePtr> output_files_;
+ std::vector<NamedOtherNodePtr> output_dirs_;
+ std::vector<NamedOtherNodePtr> dependencies_;
+ std::unique_ptr<ActionNodeTraversalState> traversal_state_{
+ std::make_unique<ActionNodeTraversalState>()};
+
+ // Collect paths from named nodes.
+ // TODO(oreiche): This could be potentially speed up by using a wrapper
+ // iterator to provide a read-only view (similar to BlobContainer)
+ [[nodiscard]] static auto NodePaths(
+ std::vector<NamedOtherNodePtr> const& nodes)
+ -> std::vector<Action::LocalPath> {
+ std::vector<Action::LocalPath> paths{nodes.size()};
+ std::transform(
+ nodes.cbegin(),
+ nodes.cend(),
+ paths.begin(),
+ [](auto const& named_node) { return named_node.path; });
+ return paths;
+ }
+
+ /// \brief Collect ids from named nodes (artifacts in this case)
+ [[nodiscard]] static auto Ids(
+ std::vector<NamedOtherNodePtr> const& nodes)
+ -> std::vector<ArtifactIdentifier> {
+ std::vector<ArtifactIdentifier> ids{nodes.size()};
+ std::transform(nodes.cbegin(),
+ nodes.cend(),
+ ids.begin(),
+ [](auto const& named_node) {
+ return named_node.node->Content().Id();
+ });
+ return ids;
+ }
+ };
+
+ /// \brief Artifact node (bipartite)
+ /// Can be entry or leaf (see \ref IsValid) and can only have single child
+ /// (see \ref AddChild)
+ class ArtifactNode final : public Node<Artifact, ActionNode> {
+ using base = Node<Artifact, ActionNode>;
+
+ public:
+ using base::base;
+ using typename base::OtherNode;
+ using typename base::OtherNodePtr;
+ using Ptr = gsl::not_null<std::unique_ptr<ArtifactNode>>;
+
+ [[nodiscard]] static auto Create(Artifact const& content) noexcept
+ -> Ptr {
+ return std::make_unique<ArtifactNode>(content);
+ }
+
+ [[nodiscard]] static auto Create(Artifact&& content) noexcept -> Ptr {
+ return std::make_unique<ArtifactNode>(std::move(content));
+ }
+
+ [[nodiscard]] auto AddBuilderActionNode(
+ OtherNodePtr const& action) noexcept -> bool {
+ if (base::Children().empty()) {
+ return base::AddChild(action);
+ }
+ Logger::Log(LogLevel::Error,
+ "cannot set a second builder for artifact {}",
+ ToHexString(Content().Id()));
+ return false;
+ }
+
+ [[nodiscard]] auto AddConsumerActionNode(
+ OtherNodePtr const& action) noexcept -> bool {
+ return base::AddParent(action);
+ }
+
+ [[nodiscard]] auto IsValid() const noexcept -> bool final {
+ return base::Children().size() <= 1;
+ }
+
+ [[nodiscard]] auto HasBuilderAction() const noexcept -> bool {
+ return !base::Children().empty();
+ }
+
+ [[nodiscard]] auto BuilderActionNode() const noexcept
+ -> ActionNode const* {
+ return HasBuilderAction() ? base::Children()[0].get() : nullptr;
+ }
+
+ [[nodiscard]] auto TraversalState() const noexcept
+ -> ArtifactNodeTraversalState* {
+ return traversal_state_.get();
+ }
+
+ private:
+ std::unique_ptr<ArtifactNodeTraversalState> traversal_state_{
+ std::make_unique<ArtifactNodeTraversalState>()};
+ };
+
+ using NamedArtifactNodePtr = ActionNode::NamedOtherNodePtr;
+
+ DependencyGraph() noexcept = default;
+
+ // DependencyGraph should not be copiable or movable. This could be changed
+ // in the case we want to make the graph construction to be functional
+ DependencyGraph(DependencyGraph const&) = delete;
+ DependencyGraph(DependencyGraph&&) = delete;
+ auto operator=(DependencyGraph const&) -> DependencyGraph& = delete;
+ auto operator=(DependencyGraph &&) -> DependencyGraph& = delete;
+ ~DependencyGraph() noexcept = default;
+
+ [[nodiscard]] auto Add(std::vector<ActionDescription> const& actions)
+ -> bool;
+
+ [[nodiscard]] auto AddAction(ActionDescription const& description) -> bool;
+
+ [[nodiscard]] auto AddArtifact(ArtifactDescription const& description)
+ -> ArtifactIdentifier;
+
+ [[nodiscard]] auto ArtifactIdentifiers() const noexcept
+ -> std::unordered_set<ArtifactIdentifier>;
+
+ [[nodiscard]] auto ArtifactNodeWithId(
+ ArtifactIdentifier const& id) const noexcept -> ArtifactNode const*;
+
+ [[nodiscard]] auto ActionNodeWithId(
+ ActionIdentifier const& id) const noexcept -> ActionNode const*;
+
+ [[nodiscard]] auto ActionNodeOfArtifactWithId(
+ ArtifactIdentifier const& artifact_id) const noexcept
+ -> ActionNode const*;
+
+ [[nodiscard]] auto ArtifactWithId(
+ ArtifactIdentifier const& id) const noexcept -> std::optional<Artifact>;
+
+ [[nodiscard]] auto ActionWithId(ActionIdentifier const& id) const noexcept
+ -> std::optional<Action>;
+
+ [[nodiscard]] auto ActionOfArtifactWithId(
+ ArtifactIdentifier const& artifact_id) const noexcept
+ -> std::optional<Action>;
+
+ [[nodiscard]] auto ActionIdOfArtifactWithId(
+ ArtifactIdentifier const& artifact_id) const noexcept
+ -> std::optional<ActionIdentifier>;
+
+ [[nodiscard]] auto IsValid() const noexcept -> bool {
+ for (auto const& node : artifact_nodes_) {
+ if (!DirectedAcyclicGraph::check_validity<
+ std::remove_reference_t<decltype(*node)>>(&(*node))) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private:
+ // List of action nodes we already created
+ std::vector<ActionNode::Ptr> action_nodes_{};
+
+ // List of artifact nodes we already created
+ std::vector<ArtifactNode::Ptr> artifact_nodes_{};
+
+ // Associates global action identifier to local node id
+ std::unordered_map<ActionIdentifier, ActionNodeIdentifier> action_ids_{};
+
+ // Associates global artifact identifier to local node id
+ std::unordered_map<ArtifactIdentifier, ArtifactNodeIdentifier>
+ artifact_ids_{};
+
+ [[nodiscard]] auto CreateOutputArtifactNodes(
+ std::string const& action_id,
+ std::vector<std::string> const& file_paths,
+ std::vector<std::string> const& dir_paths,
+ bool is_tree_action)
+ -> std::pair<std::vector<DependencyGraph::NamedArtifactNodePtr>,
+ std::vector<DependencyGraph::NamedArtifactNodePtr>>;
+
+ [[nodiscard]] auto CreateInputArtifactNodes(
+ ActionDescription::inputs_t const& inputs)
+ -> std::optional<std::vector<NamedArtifactNodePtr>>;
+
+ [[nodiscard]] auto CreateActionNode(Action const& action) noexcept
+ -> ActionNode*;
+
+ [[nodiscard]] static auto LinkNodePointers(
+ std::vector<NamedArtifactNodePtr> const& output_files,
+ std::vector<NamedArtifactNodePtr> const& output_dirs,
+ gsl::not_null<ActionNode*> const& action_node,
+ std::vector<NamedArtifactNodePtr> const& input_nodes) noexcept -> bool;
+
+ [[nodiscard]] auto AddAction(Action const& a) noexcept
+ -> ActionNodeIdentifier;
+ [[nodiscard]] auto AddAction(Action&& a) noexcept -> ActionNodeIdentifier;
+ [[nodiscard]] auto AddArtifact(Artifact const& a) noexcept
+ -> ArtifactNodeIdentifier;
+ [[nodiscard]] auto AddArtifact(Artifact&& a) noexcept
+ -> ArtifactNodeIdentifier;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_ENGINE_DAG_DAG_HPP
diff --git a/src/buildtool/execution_engine/executor/TARGETS b/src/buildtool/execution_engine/executor/TARGETS
new file mode 100644
index 00000000..de0fd8f3
--- /dev/null
+++ b/src/buildtool/execution_engine/executor/TARGETS
@@ -0,0 +1,16 @@
+{ "executor":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["executor"]
+ , "hdrs": ["executor.hpp"]
+ , "deps":
+ [ ["src/buildtool/logging", "logging"]
+ , ["src/buildtool/common", "config"]
+ , ["src/buildtool/common", "tree"]
+ , ["src/buildtool/file_system", "file_system_manager"]
+ , ["src/buildtool/execution_engine/dag", "dag"]
+ , ["src/buildtool/execution_api/common", "common"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ ]
+ , "stage": ["src", "buildtool", "execution_engine", "executor"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/execution_engine/executor/executor.hpp b/src/buildtool/execution_engine/executor/executor.hpp
new file mode 100644
index 00000000..d7447ed8
--- /dev/null
+++ b/src/buildtool/execution_engine/executor/executor.hpp
@@ -0,0 +1,532 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_ENGINE_EXECUTOR_EXECUTOR_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_ENGINE_EXECUTOR_EXECUTOR_HPP
+
+#include <algorithm>
+#include <functional>
+#include <iostream>
+#include <map>
+#include <optional>
+#include <type_traits>
+#include <vector>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/common/repository_config.hpp"
+#include "src/buildtool/common/statistics.hpp"
+#include "src/buildtool/common/tree.hpp"
+#include "src/buildtool/execution_api/common/execution_api.hpp"
+#include "src/buildtool/execution_engine/dag/dag.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+/// \brief Implementations for executing actions and uploading artifacts.
+class ExecutorImpl {
+ public:
+ /// \brief Execute action and obtain response.
+ /// \returns std::nullopt for actions without response (e.g., tree actions).
+ /// \returns nullptr on error.
+ [[nodiscard]] static auto ExecuteAction(
+ Logger const& logger,
+ gsl::not_null<DependencyGraph::ActionNode const*> const& action,
+ gsl::not_null<IExecutionApi*> const& api,
+ std::map<std::string, std::string> const& properties,
+ IExecutionAction::CacheFlag cache_flag)
+ -> std::optional<IExecutionResponse::Ptr> {
+ auto const& inputs = action->Dependencies();
+ auto const root_digest = CreateRootDigest(api, inputs);
+ if (not root_digest) {
+ Logger::Log(LogLevel::Error,
+ "failed to create root digest for input artifacts.");
+ return nullptr;
+ }
+
+ if (action->Content().IsTreeAction()) {
+ auto const& tree_artifact = action->OutputDirs()[0].node->Content();
+ bool failed_inputs = false;
+ for (auto const& [local_path, artifact] : inputs) {
+ failed_inputs |= artifact->Content().Info()->failed;
+ }
+ tree_artifact.SetObjectInfo(
+ *root_digest, ObjectType::Tree, failed_inputs);
+ return std::nullopt;
+ }
+
+ Statistics::Instance().IncrementActionsQueuedCounter();
+
+ logger.Emit(LogLevel::Trace, [&inputs]() {
+ std::ostringstream oss{};
+ oss << "start processing" << std::endl;
+ for (auto const& [local_path, artifact] : inputs) {
+ auto const& info = artifact->Content().Info();
+ oss << fmt::format(
+ " - needs {} {}",
+ local_path,
+ info ? info->ToString() : std::string{"[???]"})
+ << std::endl;
+ }
+ return oss.str();
+ });
+
+ auto remote_action = api->CreateAction(*root_digest,
+ action->Command(),
+ action->OutputFilePaths(),
+ action->OutputDirPaths(),
+ action->Env(),
+ properties);
+
+ if (remote_action == nullptr) {
+ logger.Emit(LogLevel::Error,
+ "failed to create action for execution.");
+ return nullptr;
+ }
+
+ // set action options
+ remote_action->SetCacheFlag(cache_flag);
+ remote_action->SetTimeout(IExecutionAction::kDefaultTimeout);
+ return remote_action->Execute(&logger);
+ }
+
+ /// \brief Ensures the artifact is available to the CAS, either checking
+ /// that its existing digest corresponds to that of an object already
+ /// available or by uploading it if there is no digest in the artifact. In
+ /// the later case, the new digest is saved in the artifact
+ /// \param[in] artifact The artifact to process.
+ /// \returns True if artifact is available at the point of return, false
+ /// otherwise
+ [[nodiscard]] static auto VerifyOrUploadArtifact(
+ gsl::not_null<DependencyGraph::ArtifactNode const*> const& artifact,
+ gsl::not_null<IExecutionApi*> const& api) noexcept -> bool {
+ auto const object_info_opt = artifact->Content().Info();
+ auto const file_path_opt = artifact->Content().FilePath();
+ // If there is no object info and no file path, the artifact can not be
+ // processed: it means its definition is ill-formed or that it is the
+ // output of an action, in which case it shouldn't have reached here
+ if (not object_info_opt and not file_path_opt) {
+ Logger::Log(LogLevel::Error,
+ "artifact {} can not be processed.",
+ artifact->Content().Id());
+ return false;
+ }
+ // If the artifact has digest, we check that an object with this digest
+ // is available to the execution API
+ if (object_info_opt) {
+ if (not api->IsAvailable(object_info_opt->digest) and
+ not UploadGitBlob(api,
+ artifact->Content().Repository(),
+ object_info_opt->digest,
+ /*skip_check=*/true)) {
+ Logger::Log(
+ LogLevel::Error,
+ "artifact {} should be present in CAS but is missing.",
+ artifact->Content().Id());
+ return false;
+ }
+ return true;
+ }
+
+ // Otherwise, we upload the new file to make it available to the
+ // execution API
+ // Note that we can be sure now that file_path_opt has a value and
+ // that the path stored is relative to the workspace dir, so we need to
+ // prepend it
+ auto repo = artifact->Content().Repository();
+ auto new_info = UploadFile(api, repo, *file_path_opt);
+ if (not new_info) {
+ Logger::Log(LogLevel::Error,
+ "artifact in {} could not be uploaded to CAS.",
+ file_path_opt->string());
+ return false;
+ }
+
+ // And we save the digest object type in the artifact
+ artifact->Content().SetObjectInfo(*new_info, false);
+ return true;
+ }
+
+ /// \brief Lookup blob via digest in local git repositories and upload.
+ /// \param api The endpoint used for uploading
+ /// \param repo The global repository name, the artifact belongs to
+ /// \param digest The digest of the object
+ /// \param skip_check Skip check for existence before upload
+ /// \returns true on success
+ [[nodiscard]] static auto UploadGitBlob(
+ gsl::not_null<IExecutionApi*> const& api,
+ std::string const& repo,
+ ArtifactDigest const& digest,
+ bool skip_check) noexcept -> bool {
+ auto const& repo_config = RepositoryConfig::Instance();
+ std::optional<std::string> blob{};
+ if (auto const* ws_root = repo_config.WorkspaceRoot(repo)) {
+ // try to obtain blob from local workspace's Git CAS, if any
+ blob = ws_root->ReadBlob(digest.hash());
+ }
+ if (not blob) {
+ // try to obtain blob from global Git CAS, if any
+ blob = repo_config.ReadBlobFromGitCAS(digest.hash());
+ }
+ return blob and
+ api->Upload(BlobContainer{{BazelBlob{digest, std::move(*blob)}}},
+ skip_check);
+ }
+
+ /// \brief Lookup file via path in local workspace root and upload.
+ /// \param api The endpoint used for uploading
+ /// \param repo The global repository name, the artifact belongs to
+ /// \param file_path The path of the file to be read
+ /// \returns The computed object info on success
+ [[nodiscard]] static auto UploadFile(
+ gsl::not_null<IExecutionApi*> const& api,
+ std::string const& repo,
+ std::filesystem::path const& file_path) noexcept
+ -> std::optional<Artifact::ObjectInfo> {
+ auto const* ws_root = RepositoryConfig::Instance().WorkspaceRoot(repo);
+ if (ws_root == nullptr) {
+ return std::nullopt;
+ }
+ auto const object_type = ws_root->FileType(file_path);
+ if (not object_type) {
+ return std::nullopt;
+ }
+ auto content = ws_root->ReadFile(file_path);
+ if (not content.has_value()) {
+ return std::nullopt;
+ }
+ auto digest = ArtifactDigest{ComputeHash(*content), content->size()};
+ if (not api->Upload(
+ BlobContainer{{BazelBlob{digest, std::move(*content)}}})) {
+ return std::nullopt;
+ }
+ return Artifact::ObjectInfo{std::move(digest), *object_type};
+ }
+
+ /// \brief Add digests and object type to artifact nodes for all outputs of
+ /// the action that was run
+ void static SaveObjectInfo(
+ IExecutionResponse::ArtifactInfos const& artifacts,
+ gsl::not_null<DependencyGraph::ActionNode const*> const& action,
+ bool fail_artifacts) noexcept {
+ for (auto const& [name, node] : action->OutputFiles()) {
+ node->Content().SetObjectInfo(artifacts.at(name), fail_artifacts);
+ }
+ for (auto const& [name, node] : action->OutputDirs()) {
+ node->Content().SetObjectInfo(artifacts.at(name), fail_artifacts);
+ }
+ }
+
+ /// \brief Create root tree digest for input artifacts.
+ /// \param api The endpoint required for uploading
+ /// \param artifacts The artifacts to create the root tree digest from
+ [[nodiscard]] static auto CreateRootDigest(
+ gsl::not_null<IExecutionApi*> const& api,
+ std::vector<DependencyGraph::NamedArtifactNodePtr> const&
+ artifacts) noexcept -> std::optional<ArtifactDigest> {
+ if (artifacts.size() == 1 and
+ (artifacts.at(0).path == "." or artifacts.at(0).path.empty())) {
+ auto const& info = artifacts.at(0).node->Content().Info();
+ if (info and IsTreeObject(info->type)) {
+ // Artifact list contains single tree with path "." or "". Reuse
+ // the existing tree artifact by returning its digest.
+ return info->digest;
+ }
+ }
+ return api->UploadTree(artifacts);
+ }
+ /// \brief Check that all outputs expected from the action description
+ /// are present in the artifacts map
+ [[nodiscard]] static auto CheckOutputsExist(
+ IExecutionResponse::ArtifactInfos const& artifacts,
+ std::vector<Action::LocalPath> const& outputs) noexcept -> bool {
+ for (auto const& output : outputs) {
+ if (not artifacts.contains(output)) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /// \brief Parse response and write object info to DAG's artifact nodes.
+ /// \returns false on non-zero exit code or if output artifacts are missing
+ [[nodiscard]] static auto ParseResponse(
+ Logger const& logger,
+ IExecutionResponse::Ptr const& response,
+ gsl::not_null<DependencyGraph::ActionNode const*> const& action)
+ -> bool {
+ logger.Emit(LogLevel::Trace, "finished execution");
+
+ if (!response) {
+ logger.Emit(LogLevel::Trace, "response is empty");
+ return false;
+ }
+
+ if (response->IsCached()) {
+ logger.Emit(LogLevel::Trace, " - served from cache");
+ Statistics::Instance().IncrementActionsCachedCounter();
+ }
+
+ PrintInfo(logger, action->Command(), response);
+ bool should_fail_outputs = false;
+ for (auto const& [local_path, node] : action->Dependencies()) {
+ should_fail_outputs |= node->Content().Info()->failed;
+ }
+ if (response->ExitCode() != 0) {
+ if (action->MayFail()) {
+ logger.Emit(LogLevel::Warning,
+ "{} (exit code {})",
+ *(action->MayFail()),
+ response->ExitCode());
+ should_fail_outputs = true;
+ }
+ else {
+ logger.Emit(LogLevel::Error,
+ "action returned non-zero exit code {}",
+ response->ExitCode());
+ return false;
+ }
+ }
+
+ auto artifacts = response->Artifacts();
+ auto output_files = action->OutputFilePaths();
+ auto output_dirs = action->OutputDirPaths();
+
+ if (artifacts.empty() or
+ not CheckOutputsExist(artifacts, output_files) or
+ not CheckOutputsExist(artifacts, output_dirs)) {
+ logger.Emit(LogLevel::Error, [&] {
+ std::string message{
+ "action executed with missing outputs.\n"
+ " Action outputs should be the following artifacts:"};
+ for (auto const& output : output_files) {
+ message += "\n - " + output;
+ }
+ return message;
+ });
+ return false;
+ }
+
+ SaveObjectInfo(artifacts, action, should_fail_outputs);
+
+ return true;
+ }
+
+ /// \brief Write out if response is empty and otherwise, write out
+ /// standard error/output if they are present
+ void static PrintInfo(Logger const& logger,
+ std::vector<std::string> const& command,
+ IExecutionResponse::Ptr const& response) noexcept {
+ if (!response) {
+ logger.Emit(LogLevel::Error, "response is empty");
+ return;
+ }
+ auto has_err = response->HasStdErr();
+ auto has_out = response->HasStdOut();
+ if (has_err or has_out) {
+ logger.Emit(LogLevel::Info, [&] {
+ auto message = std::string{has_err and has_out
+ ? "Output and error"
+ : has_out ? "Output" : "Error"} +
+ " of command: ";
+ message += nlohmann::json{command}.dump();
+ if (response->HasStdOut()) {
+ message += "\n" + response->StdOut();
+ }
+ if (response->HasStdErr()) {
+ message += "\n" + response->StdErr();
+ }
+ return message;
+ });
+ }
+ }
+};
+
+/// \brief Executor for using concrete Execution API.
+class Executor {
+ using Impl = ExecutorImpl;
+ using CF = IExecutionAction::CacheFlag;
+
+ public:
+ explicit Executor(IExecutionApi* api,
+ std::map<std::string, std::string> properties)
+ : api_{api}, properties_{std::move(properties)} {}
+
+ /// \brief Run an action in a blocking manner
+ /// This method must be thread-safe as it could be called in parallel
+ /// \param[in] action The action to execute.
+ /// \returns True if execution was successful, false otherwise
+ [[nodiscard]] auto Process(
+ gsl::not_null<DependencyGraph::ActionNode const*> const& action)
+ const noexcept -> bool {
+ Logger logger("action:" + action->Content().Id());
+
+ auto const response = Impl::ExecuteAction(
+ logger,
+ action,
+ api_,
+ properties_,
+ action->NoCache() ? CF::DoNotCacheOutput : CF::CacheOutput);
+
+ // check response and save digests of results
+ return not response or Impl::ParseResponse(logger, *response, action);
+ }
+
+ /// \brief Check artifact is available to the CAS or upload it.
+ /// \param[in] artifact The artifact to process.
+ /// \returns True if artifact is available or uploaded, false otherwise
+ [[nodiscard]] auto Process(
+ gsl::not_null<DependencyGraph::ArtifactNode const*> const& artifact)
+ const noexcept -> bool {
+ return Impl::VerifyOrUploadArtifact(artifact, api_);
+ }
+
+ private:
+ gsl::not_null<IExecutionApi*> api_;
+ std::map<std::string, std::string> properties_;
+};
+
+/// \brief Rebuilder for running and comparing actions of two API endpoints.
+class Rebuilder {
+ using Impl = ExecutorImpl;
+ using CF = IExecutionAction::CacheFlag;
+
+ public:
+ /// \brief Create rebuilder for action comparision of two endpoints.
+ /// \param api Rebuild endpoint, executes without action cache.
+ /// \param api_cached Reference endpoint, serves everything from cache.
+ /// \param properties Platform properties for execution.
+ Rebuilder(IExecutionApi* api,
+ IExecutionApi* api_cached,
+ std::map<std::string, std::string> properties)
+ : api_{api},
+ api_cached_{api_cached},
+ properties_{std::move(properties)} {}
+
+ [[nodiscard]] auto Process(
+ gsl::not_null<DependencyGraph::ActionNode const*> const& action)
+ const noexcept -> bool {
+ auto const& action_id = action->Content().Id();
+ Logger logger("rebuild:" + action_id);
+ auto response = Impl::ExecuteAction(
+ logger, action, api_, properties_, CF::PretendCached);
+
+ if (not response) {
+ return true; // action without response (e.g., tree action)
+ }
+
+ Logger logger_cached("cached:" + action_id);
+ auto response_cached = Impl::ExecuteAction(
+ logger_cached, action, api_cached_, properties_, CF::FromCacheOnly);
+
+ if (not response_cached) {
+ logger_cached.Emit(LogLevel::Error,
+ "expected regular action with response");
+ return false;
+ }
+
+ DetectFlakyAction(*response, *response_cached, action->Content());
+ return Impl::ParseResponse(logger, *response, action);
+ }
+
+ [[nodiscard]] auto Process(
+ gsl::not_null<DependencyGraph::ArtifactNode const*> const& artifact)
+ const noexcept -> bool {
+ return Impl::VerifyOrUploadArtifact(artifact, api_);
+ }
+
+ [[nodiscard]] auto DumpFlakyActions() const noexcept -> nlohmann::json {
+ std::unique_lock lock{m_};
+ auto actions = nlohmann::json::object();
+ for (auto const& [action_id, outputs] : flaky_actions_) {
+ for (auto const& [path, infos] : outputs) {
+ actions[action_id][path]["rebuilt"] = infos.first.ToJson();
+ actions[action_id][path]["cached"] = infos.second.ToJson();
+ }
+ }
+ return {{"flaky actions", actions}, {"cache misses", cache_misses_}};
+ }
+
+ private:
+ gsl::not_null<IExecutionApi*> api_;
+ gsl::not_null<IExecutionApi*> api_cached_;
+ std::map<std::string, std::string> properties_;
+ mutable std::mutex m_;
+ mutable std::vector<std::string> cache_misses_{};
+ mutable std::unordered_map<
+ std::string,
+ std::unordered_map<
+ std::string,
+ std::pair<Artifact::ObjectInfo, Artifact::ObjectInfo>>>
+ flaky_actions_{};
+
+ void DetectFlakyAction(IExecutionResponse::Ptr const& response,
+ IExecutionResponse::Ptr const& response_cached,
+ Action const& action) const noexcept {
+ if (response and response_cached and
+ response_cached->ActionDigest() == response->ActionDigest()) {
+ Statistics::Instance().IncrementRebuiltActionComparedCounter();
+ auto artifacts = response->Artifacts();
+ auto artifacts_cached = response_cached->Artifacts();
+ std::ostringstream msg{};
+ for (auto const& [path, info] : artifacts) {
+ auto const& info_cached = artifacts_cached[path];
+ if (info != info_cached) {
+ RecordFlakyAction(&msg, action, path, info, info_cached);
+ }
+ }
+ if (msg.tellp() > 0) {
+ Statistics::Instance().IncrementActionsFlakyCounter();
+ bool tainted = action.MayFail() or action.NoCache();
+ if (tainted) {
+ Statistics::Instance()
+ .IncrementActionsFlakyTaintedCounter();
+ }
+ Logger::Log(tainted ? LogLevel::Debug : LogLevel::Warning,
+ "{}",
+ msg.str());
+ }
+ }
+ else {
+ Statistics::Instance().IncrementRebuiltActionMissingCounter();
+ std::unique_lock lock{m_};
+ cache_misses_.emplace_back(action.Id());
+ }
+ }
+
+ void RecordFlakyAction(gsl::not_null<std::ostringstream*> const& msg,
+ Action const& action,
+ std::string const& path,
+ Artifact::ObjectInfo const& rebuilt,
+ Artifact::ObjectInfo const& cached) const noexcept {
+ auto const& action_id = action.Id();
+ if (msg->tellp() <= 0) {
+ bool tainted = action.MayFail() or action.NoCache();
+ static constexpr auto kMaxCmdChars = 69; // 80 - (prefix + suffix)
+ auto cmd = GetCmdString(action);
+ (*msg) << "Found flaky " << (tainted ? "tainted " : "")
+ << "action:" << std::endl
+ << " - id: " << action_id << std::endl
+ << " - cmd: " << cmd.substr(0, kMaxCmdChars)
+ << (cmd.length() > kMaxCmdChars ? "..." : "") << std::endl;
+ }
+ (*msg) << " - output '" << path << "' differs:" << std::endl
+ << " - " << rebuilt.ToString() << " (rebuilt)" << std::endl
+ << " - " << cached.ToString() << " (cached)" << std::endl;
+
+ std::unique_lock lock{m_};
+ auto& object_map = flaky_actions_[action_id];
+ try {
+ object_map.emplace(path, std::make_pair(rebuilt, cached));
+ } catch (std::exception const& ex) {
+ Logger::Log(LogLevel::Error,
+ "recoding flaky action failed with: {}",
+ ex.what());
+ }
+ }
+
+ static auto GetCmdString(Action const& action) noexcept -> std::string {
+ try {
+ return nlohmann::json(action.Command()).dump();
+ } catch (std::exception const& ex) {
+ return fmt::format("<exception: {}>", ex.what());
+ }
+ }
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_ENGINE_EXECUTOR_EXECUTOR_HPP
diff --git a/src/buildtool/execution_engine/traverser/TARGETS b/src/buildtool/execution_engine/traverser/TARGETS
new file mode 100644
index 00000000..eb306b6e
--- /dev/null
+++ b/src/buildtool/execution_engine/traverser/TARGETS
@@ -0,0 +1,14 @@
+{ "traverser":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["traverser"]
+ , "hdrs": ["traverser.hpp"]
+ , "deps":
+ [ ["src/buildtool/execution_engine/dag", "dag"]
+ , ["src/buildtool/multithreading", "task_system"]
+ , ["src/buildtool/logging", "logging"]
+ , ["src/utils/cpp", "concepts"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ ]
+ , "stage": ["src", "buildtool", "execution_engine", "traverser"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/execution_engine/traverser/traverser.hpp b/src/buildtool/execution_engine/traverser/traverser.hpp
new file mode 100644
index 00000000..ea44f30c
--- /dev/null
+++ b/src/buildtool/execution_engine/traverser/traverser.hpp
@@ -0,0 +1,187 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_EXECUTION_ENGINE_TRAVERSER_TRAVERSER_HPP
+#define INCLUDED_SRC_BUILDTOOL_EXECUTION_ENGINE_TRAVERSER_TRAVERSER_HPP
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/execution_engine/dag/dag.hpp"
+#include "src/buildtool/logging/logger.hpp"
+#include "src/buildtool/multithreading/task_system.hpp"
+#include "src/utils/cpp/concepts.hpp"
+
+/// \brief Concept required for Runners used by the Traverser.
+template <class T>
+concept Runnable = requires(T const r,
+ DependencyGraph::ActionNode const* action,
+ DependencyGraph::ArtifactNode const* artifact) {
+ { r.Process(action) }
+ ->same_as<bool>;
+ { r.Process(artifact) }
+ ->same_as<bool>;
+};
+
+/// \brief Class to traverse the dependency graph executing necessary actions
+/// \tparam Executor Type of the executor
+// Traversal of the graph and execution of actions are concurrent, using
+/// the //src/buildtool/execution_engine/task_system.
+/// Graph remains constant and the only parts of the nodes that are modified are
+/// their traversal state
+template <Runnable Executor>
+class Traverser {
+ public:
+ Traverser(Executor const& r, DependencyGraph const& graph, std::size_t jobs)
+ : runner_{r}, graph_{graph}, tasker_{jobs} {}
+ Traverser() = delete;
+ Traverser(Traverser const&) = delete;
+ Traverser(Traverser&&) = delete;
+ auto operator=(Traverser const&) -> Traverser& = delete;
+ auto operator=(Traverser &&) -> Traverser& = delete;
+ ~Traverser() = default;
+
+ // Traverse the whole graph
+ [[nodiscard]] auto Traverse() noexcept -> bool {
+ auto const& ids = graph_.ArtifactIdentifiers();
+ return Traverse(ids);
+ };
+
+ // Traverse starting by the artifacts with the given identifiers, avoiding
+ // executing actions that are not strictly needed to build the given
+ // artifacs
+ [[nodiscard]] auto Traverse(
+ std::unordered_set<ArtifactIdentifier> const& target_ids) noexcept
+ -> bool;
+
+ private:
+ Executor const& runner_{};
+ DependencyGraph const& graph_;
+ TaskSystem tasker_{}; // THIS SHOULD BE THE LAST MEMBER VARIABLE
+
+ // Visits discover nodes and queue visits to their children nodes.
+ void Visit(gsl::not_null<DependencyGraph::ArtifactNode const*>
+ artifact_node) noexcept;
+ void Visit(
+ gsl::not_null<DependencyGraph::ActionNode const*> action_node) noexcept;
+
+ // Notify all actions that have it as a dependency that it is available and
+ // queue execution of those that become ready (that were only waiting for
+ // this artifact)
+ void NotifyAvailable(
+ gsl::not_null<DependencyGraph::ArtifactNode const*> const&
+ artifact_node) noexcept;
+
+ // Calls NotifyAvailable on all the action's outputs
+ void NotifyAvailable(
+ gsl::not_null<DependencyGraph::ActionNode const*> const&
+ action_node) noexcept;
+
+ // Visit to nodes are queued only once
+ template <typename NodeTypePtr>
+ void QueueVisit(NodeTypePtr node) noexcept {
+ // in case the node was already discovered, there is no need to queue
+ // the visit
+ if (node->TraversalState()->GetAndMarkDiscovered()) {
+ return;
+ }
+ tasker_.QueueTask([this, node]() noexcept { Visit(node); });
+ }
+
+ // Queue task to process the node by the executor after making sure that the
+ // node is required and that it was not yet queued to be processed. The task
+ // queued will call notify that the node is available in case processing it
+ // was successful
+ template <typename NodeTypePtr>
+ void QueueProcessing(NodeTypePtr node) noexcept {
+ if (not node->TraversalState()->IsRequired() or
+ node->TraversalState()->GetAndMarkQueuedToBeProcessed()) {
+ return;
+ }
+
+ auto process_node = [this, node]() {
+ if (runner_.Process(node)) {
+ NotifyAvailable(node);
+ }
+ else {
+ Logger::Log(LogLevel::Error, "Build failed.");
+ std::exit(EXIT_FAILURE);
+ }
+ };
+ tasker_.QueueTask(process_node);
+ }
+};
+
+template <Runnable Executor>
+auto Traverser<Executor>::Traverse(
+ std::unordered_set<ArtifactIdentifier> const& target_ids) noexcept -> bool {
+ for (auto artifact_id : target_ids) {
+ auto const* artifact_node = graph_.ArtifactNodeWithId(artifact_id);
+ if (artifact_node != nullptr) {
+ QueueVisit(artifact_node);
+ }
+ else {
+ Logger::Log(
+ LogLevel::Error,
+ "artifact with id {} can not be found in dependency graph.",
+ artifact_id);
+ return false;
+ }
+ }
+ return true;
+}
+
+template <Runnable Executor>
+void Traverser<Executor>::Visit(
+ gsl::not_null<DependencyGraph::ArtifactNode const*>
+ artifact_node) noexcept {
+ artifact_node->TraversalState()->MarkRequired();
+ // Visits are queued only once per artifact node, but it could be that the
+ // builder action had multiple outputs and was queued and executed through
+ // the visit to another of the outputs, in which case the current artifact
+ // would be available and there is nothing else to do
+ if (artifact_node->TraversalState()->IsAvailable()) {
+ return;
+ }
+
+ if (artifact_node->HasBuilderAction()) {
+ QueueVisit(gsl::not_null<DependencyGraph::ActionNode const*>(
+ artifact_node->BuilderActionNode()));
+ }
+ else {
+ QueueProcessing(artifact_node);
+ }
+}
+
+template <Runnable Executor>
+void Traverser<Executor>::Visit(
+ gsl::not_null<DependencyGraph::ActionNode const*> action_node) noexcept {
+ action_node->TraversalState()->MarkRequired();
+ for (auto const& dep : action_node->Children()) {
+ if (not dep->TraversalState()->IsAvailable()) {
+ QueueVisit(dep);
+ }
+ }
+
+ if (action_node->TraversalState()->IsReady()) {
+ QueueProcessing(action_node);
+ }
+}
+
+template <Runnable Executor>
+void Traverser<Executor>::NotifyAvailable(
+ gsl::not_null<DependencyGraph::ArtifactNode const*> const&
+ artifact_node) noexcept {
+ artifact_node->TraversalState()->MakeAvailable();
+ for (auto const& action_node : artifact_node->Parents()) {
+ if (action_node->TraversalState()->NotifyAvailableDepAndCheckReady()) {
+ QueueProcessing(action_node);
+ }
+ }
+}
+
+template <Runnable Executor>
+void Traverser<Executor>::NotifyAvailable(
+ gsl::not_null<DependencyGraph::ActionNode const*> const&
+ action_node) noexcept {
+ for (auto const& output : action_node->Parents()) {
+ NotifyAvailable(output);
+ }
+}
+
+#endif // INCLUDED_SRC_BUILDTOOL_EXECUTION_ENGINE_TRAVERSER_TRAVERSER_HPP
diff --git a/src/buildtool/file_system/TARGETS b/src/buildtool/file_system/TARGETS
new file mode 100644
index 00000000..478b5903
--- /dev/null
+++ b/src/buildtool/file_system/TARGETS
@@ -0,0 +1,79 @@
+{ "object_type":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["object_type"]
+ , "hdrs": ["object_type.hpp"]
+ , "stage": ["src", "buildtool", "file_system"]
+ }
+, "file_system_manager":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["file_system_manager"]
+ , "hdrs": ["file_system_manager.hpp"]
+ , "deps":
+ [ "object_type"
+ , ["src/buildtool/logging", "logging"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ ]
+ , "stage": ["src", "buildtool", "file_system"]
+ }
+, "system_command":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["system_command"]
+ , "hdrs": ["system_command.hpp"]
+ , "deps":
+ [ "file_system_manager"
+ , ["src/buildtool/logging", "logging"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ ]
+ , "stage": ["src", "buildtool", "file_system"]
+ }
+, "jsonfs":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["jsonfs"]
+ , "hdrs": ["jsonfs.hpp"]
+ , "deps": ["object_type", "file_system_manager", ["src/utils/cpp", "json"]]
+ , "stage": ["src", "buildtool", "file_system"]
+ }
+, "git_cas":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["git_cas"]
+ , "hdrs": ["git_cas.hpp"]
+ , "srcs": ["git_cas.cpp"]
+ , "deps":
+ [ "object_type"
+ , "file_system_manager"
+ , ["src/buildtool/logging", "logging"]
+ , ["src/utils/cpp", "hex_string"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ , ["", "libgit2"]
+ ]
+ , "stage": ["src", "buildtool", "file_system"]
+ }
+, "git_tree":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["git_tree"]
+ , "hdrs": ["git_tree.hpp"]
+ , "srcs": ["git_tree.cpp"]
+ , "deps":
+ [ "git_cas"
+ , "object_type"
+ , "file_system_manager"
+ , ["src/buildtool/logging", "logging"]
+ , ["src/utils/cpp", "atomic"]
+ , ["src/utils/cpp", "hex_string"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ ]
+ , "stage": ["src", "buildtool", "file_system"]
+ }
+, "file_root":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["file_root"]
+ , "hdrs": ["file_root.hpp"]
+ , "deps":
+ [ "git_tree"
+ , "file_system_manager"
+ , ["src/buildtool/common", "artifact_description"]
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ ]
+ , "stage": ["src", "buildtool", "file_system"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/file_system/file_root.hpp b/src/buildtool/file_system/file_root.hpp
new file mode 100644
index 00000000..1df40588
--- /dev/null
+++ b/src/buildtool/file_system/file_root.hpp
@@ -0,0 +1,239 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_FILE_ROOT_HPP
+#define INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_FILE_ROOT_HPP
+
+#include <filesystem>
+#include <memory>
+#include <string>
+#include <unordered_set>
+#include <variant>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/common/artifact_description.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "src/buildtool/file_system/git_tree.hpp"
+
+class FileRoot {
+ using fs_root_t = std::filesystem::path;
+ struct git_root_t {
+ gsl::not_null<GitCASPtr> cas;
+ gsl::not_null<GitTreePtr> tree;
+ };
+ using root_t = std::variant<fs_root_t, git_root_t>;
+
+ public:
+ class DirectoryEntries {
+ using names_t = std::unordered_set<std::string>;
+ using tree_t = gsl::not_null<GitTree const*>;
+ using entries_t = std::variant<std::monostate, names_t, tree_t>;
+
+ public:
+ DirectoryEntries() noexcept = default;
+ explicit DirectoryEntries(names_t names) noexcept
+ : data_{std::move(names)} {}
+ explicit DirectoryEntries(tree_t git_tree) noexcept
+ : data_{std::move(git_tree)} {}
+ [[nodiscard]] auto Contains(std::string const& name) const noexcept
+ -> bool {
+ if (std::holds_alternative<tree_t>(data_)) {
+ return static_cast<bool>(
+ std::get<tree_t>(data_)->LookupEntryByName(name));
+ }
+ if (std::holds_alternative<names_t>(data_)) {
+ return std::get<names_t>(data_).contains(name);
+ }
+ return false;
+ }
+ [[nodiscard]] auto Empty() const noexcept -> bool {
+ if (std::holds_alternative<tree_t>(data_)) {
+ try {
+ auto const& tree = std::get<tree_t>(data_);
+ return tree->begin() == tree->end();
+ } catch (...) {
+ return false;
+ }
+ }
+ if (std::holds_alternative<names_t>(data_)) {
+ return std::get<names_t>(data_).empty();
+ }
+ return true;
+ }
+
+ private:
+ entries_t data_{};
+ };
+
+ FileRoot() noexcept = default;
+ explicit FileRoot(std::filesystem::path root) noexcept
+ : root_{std::move(root)} {}
+ FileRoot(gsl::not_null<GitCASPtr> cas,
+ gsl::not_null<GitTreePtr> tree) noexcept
+ : root_{git_root_t{std::move(cas), std::move(tree)}} {}
+
+ [[nodiscard]] static auto FromGit(std::filesystem::path const& repo_path,
+ std::string const& git_tree_id) noexcept
+ -> std::optional<FileRoot> {
+ if (auto cas = GitCAS::Open(repo_path)) {
+ if (auto tree = GitTree::Read(cas, git_tree_id)) {
+ try {
+ return FileRoot{
+ cas, std::make_shared<GitTree const>(std::move(*tree))};
+ } catch (...) {
+ }
+ }
+ }
+ return std::nullopt;
+ }
+
+ // Indicates that subsequent calls to `Exists()`, `IsFile()`,
+ // `IsDirectory()`, and `FileType()` on contents of the same directory will
+ // be served without any additional file system lookups.
+ [[nodiscard]] auto HasFastDirectoryLookup() const noexcept -> bool {
+ return std::holds_alternative<git_root_t>(root_);
+ }
+
+ [[nodiscard]] auto Exists(std::filesystem::path const& path) const noexcept
+ -> bool {
+ if (std::holds_alternative<git_root_t>(root_)) {
+ if (path == ".") {
+ return true;
+ }
+ return static_cast<bool>(
+ std::get<git_root_t>(root_).tree->LookupEntryByPath(path));
+ }
+ return FileSystemManager::Exists(std::get<fs_root_t>(root_) / path);
+ }
+
+ [[nodiscard]] auto IsFile(
+ std::filesystem::path const& file_path) const noexcept -> bool {
+ if (std::holds_alternative<git_root_t>(root_)) {
+ if (auto entry =
+ std::get<git_root_t>(root_).tree->LookupEntryByPath(
+ file_path)) {
+ return entry->IsBlob();
+ }
+ return false;
+ }
+ return FileSystemManager::IsFile(std::get<fs_root_t>(root_) /
+ file_path);
+ }
+
+ [[nodiscard]] auto IsDirectory(
+ std::filesystem::path const& dir_path) const noexcept -> bool {
+ if (std::holds_alternative<git_root_t>(root_)) {
+ if (dir_path == ".") {
+ return true;
+ }
+ if (auto entry =
+ std::get<git_root_t>(root_).tree->LookupEntryByPath(
+ dir_path)) {
+ return entry->IsTree();
+ }
+ return false;
+ }
+ return FileSystemManager::IsDirectory(std::get<fs_root_t>(root_) /
+ dir_path);
+ }
+
+ [[nodiscard]] auto ReadFile(std::filesystem::path const& file_path)
+ const noexcept -> std::optional<std::string> {
+ if (std::holds_alternative<git_root_t>(root_)) {
+ if (auto entry =
+ std::get<git_root_t>(root_).tree->LookupEntryByPath(
+ file_path)) {
+ return entry->Blob();
+ }
+ return std::nullopt;
+ }
+ return FileSystemManager::ReadFile(std::get<fs_root_t>(root_) /
+ file_path);
+ }
+
+ [[nodiscard]] auto ReadDirectory(std::filesystem::path const& dir_path)
+ const noexcept -> DirectoryEntries {
+ try {
+ if (std::holds_alternative<git_root_t>(root_)) {
+ auto const& tree = std::get<git_root_t>(root_).tree;
+ if (dir_path == ".") {
+ return DirectoryEntries{&(*tree)};
+ }
+ if (auto entry = tree->LookupEntryByPath(dir_path)) {
+ if (auto const& found_tree = entry->Tree()) {
+ return DirectoryEntries{&(*found_tree)};
+ }
+ }
+ }
+ else {
+ std::unordered_set<std::string> names{};
+ if (FileSystemManager::ReadDirectory(
+ std::get<fs_root_t>(root_) / dir_path,
+ [&names](auto name, auto /*type*/) {
+ names.emplace(name.string());
+ return true;
+ })) {
+ return DirectoryEntries{std::move(names)};
+ }
+ }
+ } catch (std::exception const& ex) {
+ Logger::Log(LogLevel::Error,
+ "reading directory {} failed with:\n{}",
+ dir_path.string(),
+ ex.what());
+ }
+ return {};
+ }
+
+ [[nodiscard]] auto FileType(std::filesystem::path const& file_path)
+ const noexcept -> std::optional<ObjectType> {
+ if (std::holds_alternative<git_root_t>(root_)) {
+ if (auto entry =
+ std::get<git_root_t>(root_).tree->LookupEntryByPath(
+ file_path)) {
+ if (entry->IsBlob()) {
+ return entry->Type();
+ }
+ }
+ return std::nullopt;
+ }
+ auto type =
+ FileSystemManager::Type(std::get<fs_root_t>(root_) / file_path);
+ if (type and IsFileObject(*type)) {
+ return type;
+ }
+ return std::nullopt;
+ }
+
+ [[nodiscard]] auto ReadBlob(std::string const& blob_id) const noexcept
+ -> std::optional<std::string> {
+ if (std::holds_alternative<git_root_t>(root_)) {
+ return std::get<git_root_t>(root_).cas->ReadObject(
+ blob_id, /*is_hex_id=*/true);
+ }
+ return std::nullopt;
+ }
+
+ // Create LOCAL or KNOWN artifact. Does not check existence for LOCAL.
+ [[nodiscard]] auto ToArtifactDescription(
+ std::filesystem::path const& file_path,
+ std::string const& repository) const noexcept
+ -> std::optional<ArtifactDescription> {
+ if (std::holds_alternative<git_root_t>(root_)) {
+ if (auto entry =
+ std::get<git_root_t>(root_).tree->LookupEntryByPath(
+ file_path)) {
+ if (entry->IsBlob()) {
+ return ArtifactDescription{
+ ArtifactDigest{entry->Hash(), *entry->Size()},
+ entry->Type(),
+ repository};
+ }
+ }
+ return std::nullopt;
+ }
+ return ArtifactDescription{file_path, repository};
+ }
+
+ private:
+ root_t root_;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_FILE_ROOT_HPP
diff --git a/src/buildtool/file_system/file_system_manager.hpp b/src/buildtool/file_system/file_system_manager.hpp
new file mode 100644
index 00000000..75902890
--- /dev/null
+++ b/src/buildtool/file_system/file_system_manager.hpp
@@ -0,0 +1,565 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_FILE_SYSTEM_MANAGER_HPP
+#define INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_FILE_SYSTEM_MANAGER_HPP
+
+#include <cstdio> // for std::fopen
+#include <exception>
+#include <filesystem>
+#include <fstream>
+#include <optional>
+
+#ifdef __unix__
+#include <unistd.h>
+#endif
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/file_system/object_type.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+/// \brief Implements primitive file system functionality.
+/// Catches all exceptions for use with exception-free callers.
+class FileSystemManager {
+ public:
+ using ReadDirEntryFunc =
+ std::function<bool(std::filesystem::path const&, ObjectType type)>;
+
+ class DirectoryAnchor {
+ friend class FileSystemManager;
+
+ public:
+ DirectoryAnchor(DirectoryAnchor const&) = delete;
+ auto operator=(DirectoryAnchor const&) -> DirectoryAnchor& = delete;
+ auto operator=(DirectoryAnchor &&) -> DirectoryAnchor& = delete;
+ ~DirectoryAnchor() noexcept {
+ if (!kRestorePath.empty()) {
+ try {
+ std::filesystem::current_path(kRestorePath);
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error, e.what());
+ }
+ }
+ }
+ [[nodiscard]] auto GetRestorePath() const noexcept
+ -> std::filesystem::path const& {
+ return kRestorePath;
+ }
+
+ private:
+ std::filesystem::path const kRestorePath{};
+
+ DirectoryAnchor()
+ : kRestorePath{FileSystemManager::GetCurrentDirectory()} {}
+ DirectoryAnchor(DirectoryAnchor&&) = default;
+ };
+
+ [[nodiscard]] static auto GetCurrentDirectory() noexcept
+ -> std::filesystem::path {
+ try {
+ return std::filesystem::current_path();
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error, e.what());
+ return std::filesystem::path{};
+ }
+ }
+
+ [[nodiscard]] static auto ChangeDirectory(
+ std::filesystem::path const& dir) noexcept -> DirectoryAnchor {
+ DirectoryAnchor anchor{};
+ try {
+ std::filesystem::current_path(dir);
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error,
+ "changing directory to {} from anchor {}:\n{}",
+ dir.string(),
+ anchor.GetRestorePath().string(),
+ e.what());
+ }
+ return anchor;
+ }
+
+ /// \brief Returns true if the directory was created or existed before.
+ [[nodiscard]] static auto CreateDirectory(
+ std::filesystem::path const& dir) noexcept -> bool {
+ return CreateDirectoryImpl(dir) != CreationStatus::Failed;
+ }
+
+ /// \brief Returns true if the directory was created by this call.
+ [[nodiscard]] static auto CreateDirectoryExclusive(
+ std::filesystem::path const& dir) noexcept -> bool {
+ return CreateDirectoryImpl(dir) == CreationStatus::Created;
+ }
+
+ /// \brief Returns true if the file was created or existed before.
+ [[nodiscard]] static auto CreateFile(
+ std::filesystem::path const& file) noexcept -> bool {
+ return CreateFileImpl(file) != CreationStatus::Failed;
+ }
+
+ /// \brief Returns true if the file was created by this call.
+ [[nodiscard]] static auto CreateFileExclusive(
+ std::filesystem::path const& file) noexcept -> bool {
+ return CreateFileImpl(file) == CreationStatus::Created;
+ }
+
+ [[nodiscard]] static auto CreateFileHardlink(
+ std::filesystem::path const& file_path,
+ std::filesystem::path const& link_path) noexcept -> bool {
+ try {
+ std::filesystem::create_hard_link(file_path, link_path);
+ return std::filesystem::is_regular_file(link_path);
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error,
+ "hard linking {} to {}\n{}",
+ file_path.string(),
+ link_path.string(),
+ e.what());
+ return false;
+ }
+ }
+
+ [[nodiscard]] static auto Rename(std::filesystem::path const& src,
+ std::filesystem::path const& dst,
+ bool no_clobber = false) noexcept -> bool {
+ if (no_clobber) {
+#ifdef __unix__
+ return link(src.c_str(), dst.c_str()) == 0 and
+ unlink(src.c_str()) == 0;
+#else
+#error "Non-unix is not supported yet"
+#endif
+ }
+ try {
+ std::filesystem::rename(src, dst);
+ return true;
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error, e.what());
+ return false;
+ }
+ }
+
+ [[nodiscard]] static auto CopyFile(
+ std::filesystem::path const& src,
+ std::filesystem::path const& dst,
+ std::filesystem::copy_options opt =
+ std::filesystem::copy_options::overwrite_existing) noexcept
+ -> bool {
+ try {
+ return std::filesystem::copy_file(src, dst, opt);
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error,
+ "copying file from {} to {}:\n{}",
+ src.string(),
+ dst.string(),
+ e.what());
+ return false;
+ }
+ }
+
+ template <ObjectType kType>
+ requires(IsFileObject(kType)) [[nodiscard]] static auto CopyFileAs(
+ std::filesystem::path const& src,
+ std::filesystem::path const& dst,
+ std::filesystem::copy_options opt =
+ std::filesystem::copy_options::overwrite_existing) noexcept
+ -> bool {
+ return CopyFile(src, dst, opt) and
+ SetFilePermissions(dst, IsExecutableObject(kType));
+ }
+
+ [[nodiscard]] static auto CopyFileAs(
+ std::filesystem::path const& src,
+ std::filesystem::path const& dst,
+ ObjectType type,
+ std::filesystem::copy_options opt =
+ std::filesystem::copy_options::overwrite_existing) noexcept
+ -> bool {
+ switch (type) {
+ case ObjectType::File:
+ return CopyFileAs<ObjectType::File>(src, dst, opt);
+ case ObjectType::Executable:
+ return CopyFileAs<ObjectType::Executable>(src, dst, opt);
+ case ObjectType::Tree:
+ break;
+ }
+
+ return false;
+ }
+
+ [[nodiscard]] static auto RemoveFile(
+ std::filesystem::path const& file) noexcept -> bool {
+ try {
+ if (!std::filesystem::exists(file)) {
+ return true;
+ }
+ if (!IsFile(file)) {
+ return false;
+ }
+ return std::filesystem::remove(file);
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error,
+ "removing file from {}:\n{}",
+ file.string(),
+ e.what());
+ return false;
+ }
+ }
+
+ [[nodiscard]] static auto RemoveDirectory(std::filesystem::path const& dir,
+ bool recursively = false) noexcept
+ -> bool {
+ try {
+ if (!std::filesystem::exists(dir)) {
+ return true;
+ }
+ if (recursively) {
+ return (std::filesystem::remove_all(dir) !=
+ static_cast<uintmax_t>(-1));
+ }
+ return std::filesystem::remove(dir);
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error,
+ "removing directory {}:\n{}",
+ dir.string(),
+ e.what());
+ return false;
+ }
+ }
+
+ [[nodiscard]] static auto ResolveSymlinks(
+ gsl::not_null<std::filesystem::path*> const& path) noexcept -> bool {
+ try {
+ while (std::filesystem::is_symlink(*path)) {
+ *path = std::filesystem::read_symlink(*path);
+ }
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error, e.what());
+ return false;
+ }
+
+ return true;
+ }
+
+ [[nodiscard]] static auto Exists(std::filesystem::path const& path) noexcept
+ -> bool {
+ try {
+ return std::filesystem::exists(path);
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error,
+ "checking for existence of path{}:\n{}",
+ path.string(),
+ e.what());
+ return false;
+ }
+
+ return true;
+ }
+
+ [[nodiscard]] static auto IsFile(std::filesystem::path const& file) noexcept
+ -> bool {
+ try {
+ if (!std::filesystem::is_regular_file(file)) {
+ return false;
+ }
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error,
+ "checking if path {} corresponds to a file:\n{}",
+ file.string(),
+ e.what());
+ return false;
+ }
+
+ return true;
+ }
+
+ [[nodiscard]] static auto IsDirectory(
+ std::filesystem::path const& dir) noexcept -> bool {
+ try {
+ return std::filesystem::is_directory(dir);
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error,
+ "checking if path {} corresponds to a directory:\n{}",
+ dir.string(),
+ e.what());
+ return false;
+ }
+
+ return true;
+ }
+
+ /// \brief Checks whether a path corresponds to an executable or not.
+ /// \param[in] path Path to check
+ /// \param[in] is_file_known (Optional) If true, we assume that the path
+ /// corresponds to a file, if false, we check if it's a file or not first.
+ /// Default value is false
+ /// \returns true if path corresponds to an executable object, false
+ /// otherwise
+ [[nodiscard]] static auto IsExecutable(std::filesystem::path const& path,
+ bool is_file_known = false) noexcept
+ -> bool {
+ if (not is_file_known and not IsFile(path)) {
+ return false;
+ }
+
+ try {
+ namespace fs = std::filesystem;
+ auto exec_flags = fs::perms::owner_exec bitor
+ fs::perms::group_exec bitor
+ fs::perms::others_exec;
+ auto exec_perms = fs::status(path).permissions() bitand exec_flags;
+ if (exec_perms == fs::perms::none) {
+ return false;
+ }
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error,
+ "checking if path {} corresponds to an executable:\n{}",
+ path.string(),
+ e.what());
+ return false;
+ }
+
+ return true;
+ }
+
+ /// \brief Gets type of object in path according to file system
+ [[nodiscard]] static auto Type(std::filesystem::path const& path) noexcept
+ -> std::optional<ObjectType> {
+ if (IsFile(path)) {
+ if (IsExecutable(path, true)) {
+ return ObjectType::Executable;
+ }
+ return ObjectType::File;
+ }
+ if (IsDirectory(path)) {
+ return ObjectType::Tree;
+ }
+ Logger::Log(LogLevel::Debug,
+ "object type for {} not supported yet.",
+ path.string());
+ return std::nullopt;
+ }
+
+ [[nodiscard]] static auto ReadFile(
+ std::filesystem::path const& file) noexcept
+ -> std::optional<std::string> {
+ auto const type = Type(file);
+ if (not type) {
+ Logger::Log(LogLevel::Debug,
+ "{} can not be read because it is not a file.",
+ file.string());
+ return std::nullopt;
+ }
+ return ReadFile(file, *type);
+ }
+
+ [[nodiscard]] static auto ReadFile(std::filesystem::path const& file,
+ ObjectType type) noexcept
+ -> std::optional<std::string> {
+ if (not IsFileObject(type)) {
+ Logger::Log(LogLevel::Debug,
+ "{} can not be read because it is not a file.",
+ file.string());
+ return std::nullopt;
+ }
+ try {
+ std::string chunk{};
+ std::string content{};
+ chunk.resize(kChunkSize);
+ std::ifstream file_reader(file.string(), std::ios::binary);
+ if (file_reader.is_open()) {
+ auto ssize = gsl::narrow<std::streamsize>(chunk.size());
+ do {
+ file_reader.read(chunk.data(), ssize);
+ auto count = file_reader.gcount();
+ if (count == ssize) {
+ content += chunk;
+ }
+ else {
+ content +=
+ chunk.substr(0, gsl::narrow<std::size_t>(count));
+ }
+ } while (file_reader.good());
+ file_reader.close();
+ return content;
+ }
+ return std::nullopt;
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error,
+ "reading file {}:\n{}",
+ file.string(),
+ e.what());
+ return std::nullopt;
+ }
+ }
+
+ [[nodiscard]] static auto ReadDirectory(
+ std::filesystem::path const& dir,
+ ReadDirEntryFunc const& read_entry) noexcept -> bool {
+ try {
+ for (auto const& entry : std::filesystem::directory_iterator{dir}) {
+ std::optional<ObjectType> type{};
+ if (entry.is_regular_file()) {
+ type = Type(entry.path());
+ }
+ if (entry.is_directory()) {
+ type = ObjectType::Tree;
+ }
+ if (not type) {
+ Logger::Log(LogLevel::Error,
+ "unsupported type for dir entry {}",
+ entry.path().string());
+ return false;
+ }
+ read_entry(entry.path().filename(), *type);
+ }
+ } catch (std::exception const& ex) {
+ Logger::Log(
+ LogLevel::Error, "reading directory {} failed", dir.string());
+ return false;
+ }
+ return true;
+ }
+
+ [[nodiscard]] static auto WriteFile(
+ std::string const& content,
+ std::filesystem::path const& file) noexcept -> auto {
+ if (not CreateDirectory(file.parent_path())) {
+ Logger::Log(LogLevel::Error,
+ "can not create directory {}",
+ file.parent_path().string());
+ return false;
+ }
+ try {
+ std::ofstream writer{file};
+ if (!writer.is_open()) {
+ Logger::Log(
+ LogLevel::Error, "can not open file {}", file.string());
+ return false;
+ }
+ writer << content;
+ writer.close();
+ return true;
+ } catch (std::exception const& e) {
+ Logger::Log(
+ LogLevel::Error, "writing to {}:\n{}", file.string(), e.what());
+ return false;
+ }
+ }
+
+ template <ObjectType kType>
+ requires(IsFileObject(kType)) [[nodiscard]] static auto WriteFileAs(
+ std::string const& content,
+ std::filesystem::path const& file) noexcept -> bool {
+ return WriteFile(content, file) and
+ SetFilePermissions(file, IsExecutableObject(kType));
+ }
+
+ [[nodiscard]] static auto WriteFileAs(std::string const& content,
+ std::filesystem::path const& file,
+ ObjectType output_type) noexcept
+ -> bool {
+ switch (output_type) {
+ case ObjectType::File:
+ return WriteFileAs<ObjectType::File>(content, file);
+ case ObjectType::Executable:
+ return WriteFileAs<ObjectType::Executable>(content, file);
+ case ObjectType::Tree:
+ return false;
+ }
+ }
+
+ [[nodiscard]] static auto IsRelativePath(
+ std::filesystem::path const& path) noexcept -> bool {
+ try {
+ return path.is_relative();
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error, e.what());
+ return false;
+ }
+ }
+
+ [[nodiscard]] static auto IsAbsolutePath(
+ std::filesystem::path const& path) noexcept -> bool {
+ try {
+ return path.is_absolute();
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error, e.what());
+ return false;
+ }
+ }
+
+ private:
+ enum class CreationStatus { Created, Exists, Failed };
+
+ static constexpr std::size_t kChunkSize{256};
+
+ /// \brief Race condition free directory creation.
+ /// Solves the TOCTOU issue.
+ [[nodiscard]] static auto CreateDirectoryImpl(
+ std::filesystem::path const& dir) noexcept -> CreationStatus {
+ try {
+ if (std::filesystem::is_directory(dir)) {
+ return CreationStatus::Exists;
+ }
+ if (std::filesystem::create_directories(dir)) {
+ return CreationStatus::Created;
+ }
+ // It could be that another thread has created the directory right
+ // after the current thread checked if it existed. For that reason,
+ // we try to create it and check if it exists if create_directories
+ // was not successful.
+ if (std::filesystem::is_directory(dir)) {
+ return CreationStatus::Exists;
+ }
+
+ return CreationStatus::Failed;
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error, e.what());
+ return CreationStatus::Failed;
+ }
+ }
+
+ /// \brief Race condition free file creation.
+ /// Solves the TOCTOU issue via C11's std::fopen.
+ [[nodiscard]] static auto CreateFileImpl(
+ std::filesystem::path const& file) noexcept -> CreationStatus {
+ try {
+ if (std::filesystem::is_regular_file(file)) {
+ return CreationStatus::Exists;
+ }
+ if (gsl::owner<FILE*> fp = std::fopen(file.c_str(), "wx")) {
+ std::fclose(fp);
+ return CreationStatus::Created;
+ }
+ // It could be that another thread has created the file right after
+ // the current thread checked if it existed. For that reason, we try
+ // to create it and check if it exists if fopen() with exclusive bit
+ // was not successful.
+ if (std::filesystem::is_regular_file(file)) {
+ return CreationStatus::Exists;
+ }
+ return CreationStatus::Failed;
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error, e.what());
+ return CreationStatus::Failed;
+ }
+ }
+
+ /// \brief Set special permissions for files.
+ /// Set to 0444 for non-executables and set to 0555 for executables.
+ static auto SetFilePermissions(std::filesystem::path const& path,
+ bool is_executable) noexcept -> bool {
+ try {
+ using std::filesystem::perms;
+ perms p{perms::owner_read | perms::group_read | perms::others_read};
+ if (is_executable) {
+ p |= perms::owner_exec | perms::group_exec | perms::others_exec;
+ }
+ std::filesystem::permissions(path, p);
+ return true;
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error, e.what());
+ return false;
+ }
+ }
+}; // class FileSystemManager
+
+#endif // INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_FILE_SYSTEM_MANAGER_HPP
diff --git a/src/buildtool/file_system/git_cas.cpp b/src/buildtool/file_system/git_cas.cpp
new file mode 100644
index 00000000..e77d8d6e
--- /dev/null
+++ b/src/buildtool/file_system/git_cas.cpp
@@ -0,0 +1,180 @@
+#include "src/buildtool/file_system/git_cas.hpp"
+
+#include <sstream>
+
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "src/buildtool/logging/logger.hpp"
+#include "src/utils/cpp/hex_string.hpp"
+
+extern "C" {
+#include <git2.h>
+}
+
+namespace {
+
+constexpr auto kOIDRawSize{GIT_OID_RAWSZ};
+constexpr auto kOIDHexSize{GIT_OID_HEXSZ};
+
+[[nodiscard]] auto GitLastError() noexcept -> std::string {
+ git_error const* err{nullptr};
+ if ((err = git_error_last()) != nullptr and err->message != nullptr) {
+ return fmt::format("error code {}: {}", err->klass, err->message);
+ }
+ return "<unknown error>";
+}
+
+[[nodiscard]] auto GitObjectID(std::string const& id,
+ bool is_hex_id = false) noexcept
+ -> std::optional<git_oid> {
+ if ((is_hex_id and id.size() < kOIDHexSize) or id.size() < kOIDRawSize) {
+ Logger::Log(LogLevel::Error,
+ "invalid git object id {}",
+ is_hex_id ? id : ToHexString(id));
+ return std::nullopt;
+ }
+ git_oid oid{};
+ if (is_hex_id and git_oid_fromstr(&oid, id.data()) == 0) {
+ return oid;
+ }
+ if (not is_hex_id and
+ git_oid_fromraw(
+ &oid,
+ reinterpret_cast<unsigned char const*>(id.data()) // NOLINT
+ ) == 0) {
+ return oid;
+ }
+ Logger::Log(LogLevel::Error,
+ "parsing git object id {} failed with:\n{}",
+ is_hex_id ? id : ToHexString(id),
+ GitLastError());
+ return std::nullopt;
+}
+
+[[nodiscard]] auto GitTypeToObjectType(git_object_t const& type) noexcept
+ -> std::optional<ObjectType> {
+ switch (type) {
+ case GIT_OBJECT_BLOB:
+ return ObjectType::File;
+ case GIT_OBJECT_TREE:
+ return ObjectType::Tree;
+ default:
+ Logger::Log(LogLevel::Error,
+ "unsupported git object type {}",
+ git_object_type2string(type));
+ return std::nullopt;
+ }
+}
+
+} // namespace
+
+auto GitCAS::Open(std::filesystem::path const& repo_path) noexcept
+ -> GitCASPtr {
+ try {
+ auto cas = std::make_shared<GitCAS>();
+ if (cas->OpenODB(repo_path)) {
+ return std::static_pointer_cast<GitCAS const>(cas);
+ }
+ } catch (std::exception const& ex) {
+ Logger::Log(LogLevel::Error,
+ "opening git object database failed with:\n{}",
+ ex.what());
+ }
+ return nullptr;
+}
+
+GitCAS::GitCAS() noexcept {
+ if (not(initialized_ = (git_libgit2_init() >= 0))) {
+ Logger::Log(LogLevel::Error, "initializing libgit2 failed");
+ }
+}
+GitCAS::~GitCAS() noexcept {
+ if (odb_ != nullptr) {
+ git_odb_free(odb_);
+ odb_ = nullptr;
+ }
+ if (initialized_) {
+ git_libgit2_shutdown();
+ }
+}
+
+auto GitCAS::ReadObject(std::string const& id, bool is_hex_id) const noexcept
+ -> std::optional<std::string> {
+ if (not initialized_) {
+ return std::nullopt;
+ }
+
+ auto oid = GitObjectID(id, is_hex_id);
+ if (not oid) {
+ return std::nullopt;
+ }
+
+ git_odb_object* obj = nullptr;
+ if (git_odb_read(&obj, odb_, &oid.value()) != 0) {
+ Logger::Log(LogLevel::Error,
+ "reading git object {} from database failed with:\n{}",
+ is_hex_id ? id : ToHexString(id),
+ GitLastError());
+ return std::nullopt;
+ }
+
+ std::string data(static_cast<char const*>(git_odb_object_data(obj)),
+ git_odb_object_size(obj));
+ git_odb_object_free(obj);
+
+ return data;
+}
+
+auto GitCAS::ReadHeader(std::string const& id, bool is_hex_id) const noexcept
+ -> std::optional<std::pair<std::size_t, ObjectType>> {
+ if (not initialized_) {
+ return std::nullopt;
+ }
+
+ auto oid = GitObjectID(id, is_hex_id);
+ if (not oid) {
+ return std::nullopt;
+ }
+
+ std::size_t size{};
+ git_object_t type{};
+ if (git_odb_read_header(&size, &type, odb_, &oid.value()) != 0) {
+ Logger::Log(LogLevel::Error,
+ "reading git object header {} from database failed "
+ "with:\n{}",
+ is_hex_id ? id : ToHexString(id),
+ GitLastError());
+ return std::nullopt;
+ }
+
+ if (auto obj_type = GitTypeToObjectType(type)) {
+ return std::make_pair(size, *obj_type);
+ }
+
+ return std::nullopt;
+}
+
+auto GitCAS::OpenODB(std::filesystem::path const& repo_path) noexcept -> bool {
+ if (initialized_) {
+ { // lock as git_repository API has no thread-safety guarantees
+ std::unique_lock lock{repo_mutex_};
+ git_repository* repo = nullptr;
+ if (git_repository_open(&repo, repo_path.c_str()) != 0) {
+ Logger::Log(LogLevel::Error,
+ "opening git repository {} failed with:\n{}",
+ repo_path.string(),
+ GitLastError());
+ return false;
+ }
+ git_repository_odb(&odb_, repo);
+ git_repository_free(repo);
+ }
+ if (odb_ == nullptr) {
+ Logger::Log(LogLevel::Error,
+ "obtaining git object database {} failed with:\n{}",
+ repo_path.string(),
+ GitLastError());
+ return false;
+ }
+ }
+ return initialized_;
+}
diff --git a/src/buildtool/file_system/git_cas.hpp b/src/buildtool/file_system/git_cas.hpp
new file mode 100644
index 00000000..d4341482
--- /dev/null
+++ b/src/buildtool/file_system/git_cas.hpp
@@ -0,0 +1,60 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_GIT_CAS_HPP
+#define INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_GIT_CAS_HPP
+
+#include <filesystem>
+#include <memory>
+#include <mutex>
+#include <optional>
+
+#include "src/buildtool/file_system/object_type.hpp"
+
+extern "C" {
+using git_odb = struct git_odb;
+}
+
+class GitCAS;
+using GitCASPtr = std::shared_ptr<GitCAS const>;
+
+/// \brief Git CAS that maintains its own libgit2 global state.
+class GitCAS {
+ public:
+ static auto Open(std::filesystem::path const& repo_path) noexcept
+ -> GitCASPtr;
+
+ GitCAS() noexcept;
+ ~GitCAS() noexcept;
+
+ // prohibit moves and copies
+ GitCAS(GitCAS const&) = delete;
+ GitCAS(GitCAS&& other) = delete;
+ auto operator=(GitCAS const&) = delete;
+ auto operator=(GitCAS&& other) = delete;
+
+ /// \brief Read object from CAS.
+ /// \param id The object id.
+ /// \param is_hex_id Specify whether `id` is hex string or raw.
+ [[nodiscard]] auto ReadObject(std::string const& id,
+ bool is_hex_id = false) const noexcept
+ -> std::optional<std::string>;
+
+ /// \brief Read object header from CAS.
+ /// \param id The object id.
+ /// \param is_hex_id Specify whether `id` is hex string or raw.
+ // Use with care. Quote from git2/odb.h:138:
+ // Note that most backends do not support reading only the header of an
+ // object, so the whole object will be read and then the header will be
+ // returned.
+ [[nodiscard]] auto ReadHeader(std::string const& id,
+ bool is_hex_id = false) const noexcept
+ -> std::optional<std::pair<std::size_t, ObjectType>>;
+
+ private:
+ static inline std::mutex repo_mutex_{};
+ git_odb* odb_{nullptr};
+ bool initialized_{false};
+
+ [[nodiscard]] auto OpenODB(std::filesystem::path const& repo_path) noexcept
+ -> bool;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_GIT_CAS_HPP
diff --git a/src/buildtool/file_system/git_tree.cpp b/src/buildtool/file_system/git_tree.cpp
new file mode 100644
index 00000000..3e98ead1
--- /dev/null
+++ b/src/buildtool/file_system/git_tree.cpp
@@ -0,0 +1,178 @@
+#include "src/buildtool/file_system/git_tree.hpp"
+
+#include <sstream>
+
+#include "src/buildtool/logging/logger.hpp"
+
+extern "C" {
+#include <git2.h>
+}
+
+namespace {
+
+constexpr auto kOIDRawSize{GIT_OID_RAWSZ};
+
+auto const kLoadTreeError =
+ std::make_shared<std::optional<GitTree>>(std::nullopt);
+
+[[nodiscard]] auto PermToType(std::string const& perm_str) noexcept
+ -> std::optional<ObjectType> {
+ constexpr auto kPermBase = 8;
+ constexpr auto kTreePerm = 040000;
+ constexpr auto kFilePerm = 0100644;
+ constexpr auto kExecPerm = 0100755;
+ constexpr auto kLinkPerm = 0120000;
+
+ int perm = std::stoi(perm_str, nullptr, kPermBase);
+
+ switch (perm) {
+ case kTreePerm:
+ return ObjectType::Tree;
+ case kFilePerm:
+ return ObjectType::File;
+ case kExecPerm:
+ return ObjectType::Executable;
+ case kLinkPerm:
+ Logger::Log(LogLevel::Error, "symlinks are not yet supported");
+ return std::nullopt;
+ default:
+ Logger::Log(LogLevel::Error, "unsupported permission {}", perm_str);
+ return std::nullopt;
+ }
+}
+
+auto ParseRawTreeObject(GitCASPtr const& cas,
+ std::string const& raw_tree) noexcept
+ -> std::optional<GitTree::entries_t> {
+ std::string perm{};
+ std::string path{};
+ std::string hash(kOIDRawSize, '\0');
+ std::istringstream iss{raw_tree};
+ GitTree::entries_t entries{};
+ // raw tree format is: "<perm> <path>\0<hash>[next entries...]"
+ while (std::getline(iss, perm, ' ') and // <perm>
+ std::getline(iss, path, '\0') and // <path>
+ iss.read(hash.data(), // <hash>
+ static_cast<std::streamsize>(hash.size()))) {
+ auto type = PermToType(perm);
+ if (not type) {
+ return std::nullopt;
+ }
+ try {
+ entries.emplace(path,
+ std::make_shared<GitTreeEntry>(cas, hash, *type));
+ } catch (std::exception const& ex) {
+ Logger::Log(LogLevel::Error,
+ "parsing git raw tree object failed with:\n{}",
+ ex.what());
+ return std::nullopt;
+ }
+ }
+ return entries;
+}
+
+// resolve '.' and '..' in path.
+[[nodiscard]] auto ResolveRelativePath(
+ std::filesystem::path const& path) noexcept -> std::filesystem::path {
+ auto normalized = path.lexically_normal();
+ return (normalized / "").parent_path(); // strip trailing slash
+}
+
+[[nodiscard]] auto LookupEntryPyPath(
+ GitTree const& tree,
+ std::filesystem::path::const_iterator it,
+ std::filesystem::path::const_iterator const& end) noexcept
+ -> GitTreeEntryPtr {
+ auto segment = *it;
+ auto entry = tree.LookupEntryByName(segment);
+ if (not entry) {
+ return nullptr;
+ }
+ if (++it != end) {
+ if (not entry->IsTree()) {
+ return nullptr;
+ }
+ return LookupEntryPyPath(*entry->Tree(), it, end);
+ }
+ return entry;
+}
+
+} // namespace
+
+auto GitTree::Read(std::filesystem::path const& repo_path,
+ std::string const& tree_id) noexcept
+ -> std::optional<GitTree> {
+ auto cas = GitCAS::Open(repo_path);
+ if (not cas) {
+ return std::nullopt;
+ }
+ return Read(cas, tree_id);
+}
+
+auto GitTree::Read(gsl::not_null<GitCASPtr> const& cas,
+ std::string const& tree_id) noexcept
+ -> std::optional<GitTree> {
+ auto obj = cas->ReadObject(tree_id, /*is_hex_id=*/true);
+ if (not obj) {
+ return std::nullopt;
+ }
+ auto entries = ParseRawTreeObject(cas, *obj);
+ if (not entries) {
+ return std::nullopt;
+ }
+ return GitTree{cas, std::move(*entries)};
+}
+
+auto GitTree::LookupEntryByName(std::string const& name) const noexcept
+ -> GitTreeEntryPtr {
+ auto entry_it = entries_.find(name);
+ if (entry_it == entries_.end()) {
+ Logger::Log(
+ LogLevel::Error, "git tree does not contain entry {}", name);
+ return nullptr;
+ }
+ return entry_it->second;
+}
+
+auto GitTree::LookupEntryByPath(
+ std::filesystem::path const& path) const noexcept -> GitTreeEntryPtr {
+ auto resolved = ResolveRelativePath(path);
+ return LookupEntryPyPath(*this, resolved.begin(), resolved.end());
+}
+
+auto GitTreeEntry::Blob() const noexcept -> std::optional<std::string> {
+ if (not IsBlob()) {
+ return std::nullopt;
+ }
+ return cas_->ReadObject(raw_id_);
+}
+
+auto GitTreeEntry::Tree() const& noexcept -> std::optional<GitTree> const& {
+ auto ptr = tree_cached_.load();
+ if (not ptr) {
+ if (not tree_loading_.exchange(true)) {
+ ptr = kLoadTreeError;
+ std::optional<std::string> obj{};
+ if (IsTree() and (obj = cas_->ReadObject(raw_id_))) {
+ if (auto entries = ParseRawTreeObject(cas_, *obj)) {
+ ptr = std::make_shared<std::optional<GitTree>>(
+ GitTree{cas_, std::move(*entries)});
+ }
+ }
+ tree_cached_.store(ptr);
+ tree_cached_.notify_all();
+ }
+ else {
+ tree_cached_.wait(nullptr);
+ ptr = tree_cached_.load();
+ }
+ }
+ return *ptr;
+}
+
+auto GitTreeEntry::Size() const noexcept -> std::optional<std::size_t> {
+ if (auto header = cas_->ReadHeader(raw_id_)) {
+ return header->first;
+ }
+ return std::nullopt;
+}
diff --git a/src/buildtool/file_system/git_tree.hpp b/src/buildtool/file_system/git_tree.hpp
new file mode 100644
index 00000000..57cb3b52
--- /dev/null
+++ b/src/buildtool/file_system/git_tree.hpp
@@ -0,0 +1,87 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_GIT_TREE_HPP
+#define INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_GIT_TREE_HPP
+
+#include <filesystem>
+#include <optional>
+#include <unordered_map>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/file_system/git_cas.hpp"
+#include "src/buildtool/file_system/object_type.hpp"
+#include "src/utils/cpp/atomic.hpp"
+#include "src/utils/cpp/hex_string.hpp"
+
+class GitTreeEntry;
+using GitTreeEntryPtr = std::shared_ptr<GitTreeEntry const>;
+
+class GitTree {
+ friend class GitTreeEntry;
+
+ public:
+ using entries_t =
+ std::unordered_map<std::string, gsl::not_null<GitTreeEntryPtr>>;
+
+ /// \brief Read tree with given id from Git repository.
+ /// \param repo_path Path to the Git repository.
+ /// \param tree_id Tree id as as hex string.
+ [[nodiscard]] static auto Read(std::filesystem::path const& repo_path,
+ std::string const& tree_id) noexcept
+ -> std::optional<GitTree>;
+
+ /// \brief Read tree with given id from CAS.
+ /// \param cas Git CAS that contains the tree id.
+ /// \param tree_id Tree id as as hex string.
+ [[nodiscard]] static auto Read(gsl::not_null<GitCASPtr> const& cas,
+ std::string const& tree_id) noexcept
+ -> std::optional<GitTree>;
+
+ /// \brief Lookup by dir entry name. '.' and '..' are not allowed.
+ [[nodiscard]] auto LookupEntryByName(std::string const& name) const noexcept
+ -> GitTreeEntryPtr;
+
+ /// \brief Lookup by relative path. '.' is not allowed.
+ [[nodiscard]] auto LookupEntryByPath(
+ std::filesystem::path const& path) const noexcept -> GitTreeEntryPtr;
+
+ [[nodiscard]] auto begin() const noexcept { return entries_.begin(); }
+ [[nodiscard]] auto end() const noexcept { return entries_.end(); }
+
+ private:
+ gsl::not_null<GitCASPtr> cas_;
+ entries_t entries_;
+
+ GitTree(gsl::not_null<GitCASPtr> cas, entries_t&& entries) noexcept
+ : cas_{std::move(cas)}, entries_{std::move(entries)} {}
+};
+
+class GitTreeEntry {
+ public:
+ GitTreeEntry(gsl::not_null<GitCASPtr> cas,
+ std::string raw_id,
+ ObjectType type) noexcept
+ : cas_{std::move(cas)}, raw_id_{std::move(raw_id)}, type_{type} {}
+
+ [[nodiscard]] auto IsBlob() const noexcept { return IsFileObject(type_); }
+ [[nodiscard]] auto IsTree() const noexcept { return IsTreeObject(type_); }
+
+ [[nodiscard]] auto Blob() const noexcept -> std::optional<std::string>;
+ [[nodiscard]] auto Tree() && = delete;
+ [[nodiscard]] auto Tree() const& noexcept -> std::optional<GitTree> const&;
+
+ [[nodiscard]] auto Hash() const noexcept { return ToHexString(raw_id_); }
+ [[nodiscard]] auto Type() const noexcept { return type_; }
+ // Use with care. Implementation might read entire object to obtain size.
+ // Consider using Blob()->size() instead.
+ [[nodiscard]] auto Size() const noexcept -> std::optional<std::size_t>;
+
+ private:
+ gsl::not_null<GitCASPtr> cas_;
+ std::string raw_id_;
+ ObjectType type_;
+ mutable atomic_shared_ptr<std::optional<GitTree>> tree_cached_{nullptr};
+ mutable std::atomic<bool> tree_loading_{false};
+};
+
+using GitTreePtr = std::shared_ptr<GitTree const>;
+
+#endif // INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_GIT_TREE_HPP
diff --git a/src/buildtool/file_system/jsonfs.hpp b/src/buildtool/file_system/jsonfs.hpp
new file mode 100644
index 00000000..0ec381c7
--- /dev/null
+++ b/src/buildtool/file_system/jsonfs.hpp
@@ -0,0 +1,47 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_JSONFS_HPP
+#define INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_JSONFS_HPP
+
+#include <exception>
+#include <filesystem>
+#include <fstream>
+#include <optional>
+
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "src/buildtool/file_system/object_type.hpp"
+#include "src/utils/cpp/json.hpp"
+
+class Json {
+ public:
+ // Note that we are not using std::pair<std::nlohmann, bool> and being
+ // coherent with FileSystemManager::ReadFile because there is a bug in llvm
+ // toolchain related to type_traits that does not allow us to use
+ // std::pair<T,U> where T or U are nlohmann::json.
+ // LLVM bug report: https://bugs.llvm.org/show_bug.cgi?id=48507
+ // Minimal example: https://godbolt.org/z/zacedsGzo
+ [[nodiscard]] static auto ReadFile(
+ std::filesystem::path const& file) noexcept
+ -> std::optional<nlohmann::json> {
+ auto const type = FileSystemManager::Type(file);
+ if (not type or not IsFileObject(*type)) {
+ Logger::Log(LogLevel::Debug,
+ "{} can not be read because it is not a file.",
+ file.string());
+ return std::nullopt;
+ }
+ try {
+ nlohmann::json content;
+ std::ifstream file_reader(file.string());
+ if (file_reader.is_open()) {
+ file_reader >> content;
+ return content;
+ }
+ return std::nullopt;
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error, e.what());
+ return std::nullopt;
+ }
+ }
+
+}; // Class Json
+
+#endif // INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_JSONFS_HPP
diff --git a/src/buildtool/file_system/object_type.hpp b/src/buildtool/file_system/object_type.hpp
new file mode 100644
index 00000000..6209f05d
--- /dev/null
+++ b/src/buildtool/file_system/object_type.hpp
@@ -0,0 +1,44 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_OBJECT_TYPE_HPP
+#define INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_OBJECT_TYPE_HPP
+
+enum class ObjectType {
+ File,
+ Executable,
+ Tree,
+};
+
+[[nodiscard]] constexpr auto FromChar(char c) -> ObjectType {
+ switch (c) {
+ case 'x':
+ return ObjectType::Executable;
+ case 't':
+ return ObjectType::Tree;
+ default:
+ return ObjectType::File;
+ }
+}
+
+[[nodiscard]] constexpr auto ToChar(ObjectType type) -> char {
+ switch (type) {
+ case ObjectType::File:
+ return 'f';
+ case ObjectType::Executable:
+ return 'x';
+ case ObjectType::Tree:
+ return 't';
+ }
+}
+
+[[nodiscard]] constexpr auto IsFileObject(ObjectType type) -> bool {
+ return type == ObjectType::Executable or type == ObjectType::File;
+}
+
+[[nodiscard]] constexpr auto IsExecutableObject(ObjectType type) -> bool {
+ return type == ObjectType::Executable;
+}
+
+[[nodiscard]] constexpr auto IsTreeObject(ObjectType type) -> bool {
+ return type == ObjectType::Tree;
+}
+
+#endif // INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_OBJECT_TYPE_HPP
diff --git a/src/buildtool/file_system/system_command.hpp b/src/buildtool/file_system/system_command.hpp
new file mode 100644
index 00000000..66470ade
--- /dev/null
+++ b/src/buildtool/file_system/system_command.hpp
@@ -0,0 +1,202 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_EXECUTION_SYSTEM_HPP
+#define INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_EXECUTION_SYSTEM_HPP
+
+#include <array>
+#include <cstdio>
+#include <cstring> // for strerror()
+#include <iterator>
+#include <map>
+#include <optional>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include <sys/wait.h>
+#include <unistd.h>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+/// \brief Execute system commands and obtain stdout, stderr and return value.
+/// Subsequent commands are context free and are not affected by previous
+/// commands. This class is not thread-safe.
+class SystemCommand {
+ public:
+ struct ExecOutput {
+ int return_value{};
+ std::filesystem::path stdout_file{};
+ std::filesystem::path stderr_file{};
+ };
+
+ /// \brief Create execution system with name.
+ explicit SystemCommand(std::string name) : logger_{std::move(name)} {}
+
+ /// \brief Execute command and arguments.
+ /// \param argv argv vector with the command to execute
+ /// \param env Environment variables set for execution.
+ /// \param cwd Working directory for execution.
+ /// \param tmpdir Temporary directory for storing stdout/stderr files.
+ /// \returns std::nullopt if there was an error in the execution setup
+ /// outside running the command itself, SystemCommand::ExecOutput otherwise.
+ [[nodiscard]] auto Execute(std::vector<std::string> argv,
+ std::map<std::string, std::string> env,
+ std::filesystem::path const& cwd,
+ std::filesystem::path const& tmpdir) noexcept
+ -> std::optional<ExecOutput> {
+ if (not FileSystemManager::IsDirectory(tmpdir)) {
+ logger_.Emit(LogLevel::Error,
+ "Temporary directory does not exist {}",
+ tmpdir.string());
+ return std::nullopt;
+ }
+
+ if (argv.empty()) {
+ logger_.Emit(LogLevel::Error, "Command cannot be empty.");
+ return std::nullopt;
+ }
+
+ std::vector<char*> cmd = UnwrapStrings(&argv);
+
+ std::vector<std::string> env_string{};
+ std::transform(std::begin(env),
+ std::end(env),
+ std::back_inserter(env_string),
+ [](auto& name_value) {
+ return name_value.first + "=" + name_value.second;
+ });
+ std::vector<char*> envp = UnwrapStrings(&env_string);
+ return ExecuteCommand(cmd.data(), envp.data(), cwd, tmpdir);
+ }
+
+ private:
+ Logger logger_;
+
+ /// \brief Open file exclusively as write-only.
+ [[nodiscard]] static auto OpenFile(
+ std::filesystem::path const& file_path) noexcept {
+ static auto file_closer = [](gsl::owner<FILE*> f) {
+ if (f != nullptr) {
+ std::fclose(f);
+ }
+ };
+ return std::unique_ptr<FILE, decltype(file_closer)>(
+ std::fopen(file_path.c_str(), "wx"), file_closer);
+ }
+
+ /// \brief Execute command and arguments.
+ /// \param cmd Command arguments as char pointer array.
+ /// \param envp Environment variables as char pointer array.
+ /// \param cwd Working directory for execution.
+ /// \param tmpdir Temporary directory for storing stdout/stderr files.
+ /// \returns ExecOutput if command was successfully submitted to the system.
+ /// \returns std::nullopt on internal failure.
+ [[nodiscard]] auto ExecuteCommand(
+ char* const* cmd,
+ char* const* envp,
+ std::filesystem::path const& cwd,
+ std::filesystem::path const& tmpdir) noexcept
+ -> std::optional<ExecOutput> {
+ auto stdout_file = tmpdir / "stdout";
+ auto stderr_file = tmpdir / "stderr";
+ if (auto const out = OpenFile(stdout_file)) {
+ if (auto const err = OpenFile(stderr_file)) {
+ if (auto retval = ForkAndExecute(
+ cmd, envp, cwd, fileno(out.get()), fileno(err.get()))) {
+ return ExecOutput{*retval,
+ std::move(stdout_file),
+ std::move(stderr_file)};
+ }
+ }
+ else {
+ logger_.Emit(LogLevel::Error,
+ "Failed to open stderr file '{}' with error: {}",
+ stderr_file.string(),
+ strerror(errno));
+ }
+ }
+ else {
+ logger_.Emit(LogLevel::Error,
+ "Failed to open stdout file '{}' with error: {}",
+ stdout_file.string(),
+ strerror(errno));
+ }
+
+ return std::nullopt;
+ }
+
+ /// \brief Fork process and exec command.
+ /// \param cmd Command arguments as char pointer array.
+ /// \param envp Environment variables as char pointer array.
+ /// \param cwd Working directory for execution.
+ /// \param out_fd File descriptor to standard output file.
+ /// \param err_fd File descriptor to standard erro file.
+ /// \returns return code if command was successfully submitted to system.
+ /// \returns std::nullopt if fork or exec failed.
+ [[nodiscard]] auto ForkAndExecute(char* const* cmd,
+ char* const* envp,
+ std::filesystem::path const& cwd,
+ int out_fd,
+ int err_fd) const noexcept
+ -> std::optional<int> {
+ // fork child process
+ pid_t pid = ::fork();
+ if (-1 == pid) {
+ logger_.Emit(LogLevel::Error,
+ "Failed to execute '{}': cannot fork a child process.",
+ *cmd);
+ return std::nullopt;
+ }
+
+ // dispatch child/parent process
+ if (pid == 0) {
+ // some executables require an open (possibly seekable) stdin, and
+ // therefore, we use an open temporary file that does not appear
+ // on the file system and will be removed automatically once the
+ // descriptor is closed.
+ gsl::owner<FILE*> in_file = std::tmpfile();
+ auto in_fd = fileno(in_file);
+
+ // redirect and close fds
+ ::dup2(in_fd, STDIN_FILENO);
+ ::dup2(out_fd, STDOUT_FILENO);
+ ::dup2(err_fd, STDERR_FILENO);
+ ::close(in_fd);
+ ::close(out_fd);
+ ::close(err_fd);
+
+ [[maybe_unused]] auto anchor =
+ FileSystemManager::ChangeDirectory(cwd);
+
+ // execute command in child process and exit
+ ::execvpe(*cmd, cmd, envp);
+
+ // report error and terminate child process if ::execvp did not exit
+ logger_.Emit(LogLevel::Error,
+ "Failed to execute '{}' with error: {}",
+ *cmd,
+ strerror(errno));
+
+ std::exit(EXIT_FAILURE);
+ }
+
+ // wait for child to finish and obtain return value
+ int status{};
+ ::waitpid(pid, &status, 0);
+ // NOLINTNEXTLINE(hicpp-signed-bitwise)
+ return WEXITSTATUS(status);
+ }
+
+ static auto UnwrapStrings(std::vector<std::string>* v) noexcept
+ -> std::vector<char*> {
+ std::vector<char*> raw{};
+ std::transform(std::begin(*v),
+ std::end(*v),
+ std::back_inserter(raw),
+ [](auto& str) { return str.data(); });
+ raw.push_back(nullptr);
+ return raw;
+ }
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_FILE_SYSTEM_EXECUTION_SYSTEM_HPP
diff --git a/src/buildtool/graph_traverser/TARGETS b/src/buildtool/graph_traverser/TARGETS
new file mode 100644
index 00000000..ab13a2bc
--- /dev/null
+++ b/src/buildtool/graph_traverser/TARGETS
@@ -0,0 +1,22 @@
+{ "graph_traverser":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["graph_traverser"]
+ , "hdrs": ["graph_traverser.hpp"]
+ , "deps":
+ [ ["src/buildtool/common", "cli"]
+ , ["src/buildtool/common", "tree"]
+ , ["src/buildtool/execution_engine/dag", "dag"]
+ , ["src/buildtool/execution_engine/executor", "executor"]
+ , ["src/buildtool/execution_engine/traverser", "traverser"]
+ , ["src/buildtool/execution_api/local", "local"]
+ , ["src/buildtool/execution_api/remote", "bazel"]
+ , ["src/buildtool/execution_api/remote", "config"]
+ , ["src/buildtool/file_system", "file_system_manager"]
+ , ["src/buildtool/file_system", "object_type"]
+ , ["src/buildtool/file_system", "jsonfs"]
+ , ["src/utils/cpp", "json"]
+ , ["@", "fmt", "", "fmt"]
+ ]
+ , "stage": ["src", "buildtool", "graph_traverser"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/graph_traverser/graph_traverser.hpp b/src/buildtool/graph_traverser/graph_traverser.hpp
new file mode 100644
index 00000000..c92fbbf8
--- /dev/null
+++ b/src/buildtool/graph_traverser/graph_traverser.hpp
@@ -0,0 +1,569 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_GRAPH_TRAVERSER_GRAPH_TRAVERSER_HPP
+#define INCLUDED_SRC_BUILDTOOL_GRAPH_TRAVERSER_GRAPH_TRAVERSER_HPP
+
+#include <cstdlib>
+#include <filesystem>
+#include <map>
+#include <optional>
+#include <sstream>
+#include <string>
+#include <unordered_map>
+
+#include "fmt/core.h"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/common/cli.hpp"
+#include "src/buildtool/common/statistics.hpp"
+#include "src/buildtool/common/tree.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp"
+#include "src/buildtool/execution_api/local/local_api.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_api.hpp"
+#include "src/buildtool/execution_api/remote/config.hpp"
+#include "src/buildtool/execution_engine/dag/dag.hpp"
+#include "src/buildtool/execution_engine/executor/executor.hpp"
+#include "src/buildtool/execution_engine/traverser/traverser.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "src/buildtool/file_system/jsonfs.hpp"
+#include "src/buildtool/file_system/object_type.hpp"
+#include "src/buildtool/logging/log_sink_cmdline.hpp"
+#include "src/buildtool/logging/log_sink_file.hpp"
+#include "src/buildtool/logging/logger.hpp"
+#include "src/utils/cpp/json.hpp"
+
+class GraphTraverser {
+ public:
+ struct CommandLineArguments {
+ std::size_t jobs;
+ EndpointArguments endpoint;
+ BuildArguments build;
+ std::optional<StageArguments> stage;
+ std::optional<RebuildArguments> rebuild;
+ };
+
+ explicit GraphTraverser(CommandLineArguments clargs)
+ : clargs_{std::move(clargs)},
+ api_{CreateExecutionApi(clargs_.endpoint)} {}
+
+ /// \brief Parses actions and blobs into graph, traverses it and retrieves
+ /// outputs specified by command line arguments
+ [[nodiscard]] auto BuildAndStage(
+ std::map<std::string, ArtifactDescription> const& artifact_descriptions,
+ std::map<std::string, ArtifactDescription> const& runfile_descriptions,
+ std::vector<ActionDescription> const& action_descriptions,
+ std::vector<std::string> const& blobs,
+ std::vector<Tree> const& trees) const
+ -> std::optional<std::pair<std::vector<std::filesystem::path>, bool>> {
+ DependencyGraph graph; // must outlive artifact_nodes
+ auto artifacts = BuildArtifacts(&graph,
+ artifact_descriptions,
+ runfile_descriptions,
+ action_descriptions,
+ trees,
+ blobs);
+ if (not artifacts) {
+ return std::nullopt;
+ }
+ auto const [rel_paths, artifact_nodes] = *artifacts;
+
+ auto const object_infos = CollectObjectInfos(artifact_nodes);
+ if (not object_infos) {
+ return std::nullopt;
+ }
+ bool failed_artifacts = false;
+ for (auto const& obj_info : *object_infos) {
+ failed_artifacts = failed_artifacts || obj_info.failed;
+ }
+
+ if (not clargs_.stage) {
+ PrintOutputs("Artifacts built, logical paths are:",
+ rel_paths,
+ artifact_nodes,
+ runfile_descriptions);
+ MaybePrintToStdout(*artifacts);
+ return std::make_pair(std::move(artifacts->first),
+ failed_artifacts);
+ }
+
+ auto output_paths = RetrieveOutputs(rel_paths, *object_infos);
+ if (not output_paths) {
+ return std::nullopt;
+ }
+ PrintOutputs("Artifacts can be found in:",
+ *output_paths,
+ artifact_nodes,
+ runfile_descriptions);
+
+ MaybePrintToStdout(*artifacts);
+
+ return std::make_pair(*output_paths, failed_artifacts);
+ }
+
+ /// \brief Parses graph description into graph, traverses it and retrieves
+ /// outputs specified by command line arguments
+ [[nodiscard]] auto BuildAndStage(
+ std::filesystem::path const& graph_description,
+ nlohmann::json const& artifacts) const
+ -> std::optional<std::pair<std::vector<std::filesystem::path>, bool>> {
+ // Read blobs to upload and actions from graph description file
+ auto desc = ReadGraphDescription(graph_description);
+ if (not desc) {
+ return std::nullopt;
+ }
+ auto const [blobs, tree_descs, actions] = *desc;
+
+ std::vector<ActionDescription> action_descriptions{};
+ action_descriptions.reserve(actions.size());
+ for (auto const& [id, description] : actions.items()) {
+ auto action = ActionDescription::FromJson(id, description);
+ if (not action) {
+ return std::nullopt; // Error already logged
+ }
+ action_descriptions.emplace_back(std::move(*action));
+ }
+
+ std::vector<Tree> trees{};
+ for (auto const& [id, description] : tree_descs.items()) {
+ auto tree = Tree::FromJson(id, description);
+ if (not tree) {
+ return std::nullopt;
+ }
+ trees.emplace_back(std::move(*tree));
+ }
+
+ std::map<std::string, ArtifactDescription> artifact_descriptions{};
+ for (auto const& [rel_path, description] : artifacts.items()) {
+ auto artifact = ArtifactDescription::FromJson(description);
+ if (not artifact) {
+ return std::nullopt; // Error already logged
+ }
+ artifact_descriptions.emplace(rel_path, std::move(*artifact));
+ }
+
+ return BuildAndStage(
+ artifact_descriptions, {}, action_descriptions, blobs, trees);
+ }
+
+ [[nodiscard]] auto ExecutionApi() const -> gsl::not_null<IExecutionApi*> {
+ return &(*api_);
+ }
+
+ private:
+ CommandLineArguments const clargs_;
+ gsl::not_null<IExecutionApi::Ptr> const api_;
+
+ /// \brief Reads contents of graph description file as json object. In case
+ /// the description is missing "blobs" or "actions" key/value pairs or they
+ /// can't be retrieved with the appropriate types, execution is terminated
+ /// after logging error
+ /// \returns A pair containing the blobs to upload (as a vector of strings)
+ /// and the actions as a json object.
+ [[nodiscard]] static auto ReadGraphDescription(
+ std::filesystem::path const& graph_description)
+ -> std::optional<
+ std::tuple<nlohmann::json, nlohmann::json, nlohmann::json>> {
+ auto const graph_description_opt = Json::ReadFile(graph_description);
+ if (not graph_description_opt.has_value()) {
+ Logger::Log(LogLevel::Error,
+ "parsing graph from {}",
+ graph_description.string());
+ return std::nullopt;
+ }
+ auto blobs_opt = ExtractValueAs<std::vector<std::string>>(
+ *graph_description_opt, "blobs", [](std::string const& s) {
+ Logger::Log(LogLevel::Error,
+ "{}\ncan not retrieve value for \"blobs\" from "
+ "graph description.",
+ s);
+ });
+ auto trees_opt = ExtractValueAs<nlohmann::json>(
+ *graph_description_opt, "trees", [](std::string const& s) {
+ Logger::Log(LogLevel::Error,
+ "{}\ncan not retrieve value for \"trees\" from "
+ "graph description.",
+ s);
+ });
+ auto actions_opt = ExtractValueAs<nlohmann::json>(
+ *graph_description_opt, "actions", [](std::string const& s) {
+ Logger::Log(LogLevel::Error,
+ "{}\ncan not retrieve value for \"actions\" from "
+ "graph description.",
+ s);
+ });
+ if (not blobs_opt or not trees_opt or not actions_opt) {
+ return std::nullopt;
+ }
+ return std::make_tuple(std::move(*blobs_opt),
+ std::move(*trees_opt),
+ std::move(*actions_opt));
+ }
+
+ [[nodiscard]] static auto CreateExecutionApi(
+ EndpointArguments const& clargs) -> gsl::not_null<IExecutionApi::Ptr> {
+ if (clargs.remote_execution_address) {
+ auto remote = RemoteExecutionConfig{};
+ if (not remote.SetAddress(*clargs.remote_execution_address)) {
+ Logger::Log(LogLevel::Error,
+ "parsing remote execution address '{}' failed.",
+ *clargs.remote_execution_address);
+ std::exit(EXIT_FAILURE);
+ }
+
+ ExecutionConfiguration config;
+ config.skip_cache_lookup = false;
+
+ return std::make_unique<BazelApi>(
+ "remote-execution", remote.Host(), remote.Port(), config);
+ }
+ return std::make_unique<LocalApi>();
+ }
+
+ /// \brief Requires for the executor to upload blobs to CAS. In the case any
+ /// of the uploads fails, execution is terminated
+ /// \param[in] blobs blobs to be uploaded
+ [[nodiscard]] auto UploadBlobs(
+ std::vector<std::string> const& blobs) const noexcept -> bool {
+ BlobContainer container;
+ for (auto const& blob : blobs) {
+ auto digest = ArtifactDigest{ComputeHash(blob), blob.size()};
+ Logger::Log(LogLevel::Trace, [&]() {
+ return fmt::format(
+ "Uploaded blob {}, its digest has id {} and size {}.",
+ nlohmann::json(blob).dump(),
+ digest.hash(),
+ digest.size());
+ });
+ try {
+ container.Emplace(BazelBlob{std::move(digest), blob});
+ } catch (std::exception const& ex) {
+ Logger::Log(
+ LogLevel::Error, "failed to create blob with: ", ex.what());
+ return false;
+ }
+ }
+ return api_->Upload(container);
+ }
+
+ /// \brief Adds the artifacts to be retrieved to the graph
+ /// \param[in] g dependency graph
+ /// \param[in] artifacts output artifact map
+ /// \param[in] runfiles output runfile map
+ /// \returns pair of vectors where the first vector contains the absolute
+ /// paths to which the artifacts will be retrieved and the second one
+ /// contains the ids of the artifacts to be retrieved
+ [[nodiscard]] static auto AddArtifactsToRetrieve(
+ gsl::not_null<DependencyGraph*> const& g,
+ std::map<std::string, ArtifactDescription> const& artifacts,
+ std::map<std::string, ArtifactDescription> const& runfiles)
+ -> std::optional<std::pair<std::vector<std::filesystem::path>,
+ std::vector<ArtifactIdentifier>>> {
+ std::vector<std::filesystem::path> rel_paths;
+ std::vector<ArtifactIdentifier> ids;
+ auto total_size = artifacts.size() + runfiles.size();
+ rel_paths.reserve(total_size);
+ ids.reserve(total_size);
+ auto add_and_get_info =
+ [&g, &rel_paths, &ids](
+ std::map<std::string, ArtifactDescription> const& descriptions)
+ -> bool {
+ for (auto const& [rel_path, artifact] : descriptions) {
+ rel_paths.emplace_back(rel_path);
+ ids.emplace_back(g->AddArtifact(artifact));
+ }
+ return true;
+ };
+ if (add_and_get_info(artifacts) and add_and_get_info(runfiles)) {
+ return std::make_pair(std::move(rel_paths), std::move(ids));
+ }
+ return std::nullopt;
+ }
+
+ /// \brief Traverses the graph. In case any of the artifact ids
+ /// specified by the command line arguments is duplicated, execution is
+ /// terminated.
+ [[nodiscard]] auto Traverse(
+ DependencyGraph const& g,
+ std::vector<ArtifactIdentifier> const& artifact_ids) const -> bool {
+ Executor executor{&(*api_), clargs_.build.platform_properties};
+ Traverser t{executor, g, clargs_.jobs};
+ return t.Traverse({std::begin(artifact_ids), std::end(artifact_ids)});
+ }
+
+ [[nodiscard]] auto TraverseRebuild(
+ DependencyGraph const& g,
+ std::vector<ArtifactIdentifier> const& artifact_ids) const -> bool {
+ // create second configuration for cache endpoint
+ auto cache_args = clargs_.endpoint;
+ if (not clargs_.rebuild->cache_endpoint.value_or("").empty()) {
+ cache_args.remote_execution_address =
+ *clargs_.rebuild->cache_endpoint == "local"
+ ? std::nullopt // disable
+ : clargs_.rebuild->cache_endpoint; // set endpoint
+ }
+
+ // setup rebuilder with api for cache endpoint
+ auto api_cached = CreateExecutionApi(cache_args);
+ Rebuilder executor{
+ &(*api_), &(*api_cached), clargs_.build.platform_properties};
+ bool success{false};
+ {
+ Traverser t{executor, g, clargs_.jobs};
+ success =
+ t.Traverse({std::begin(artifact_ids), std::end(artifact_ids)});
+ }
+
+ if (success and clargs_.rebuild->dump_flaky) {
+ std::ofstream file{*clargs_.rebuild->dump_flaky};
+ file << executor.DumpFlakyActions().dump(2);
+ }
+ return success;
+ }
+
+ /// \brief Retrieves nodes corresponding to artifacts with ids in artifacts.
+ /// In case any of the identifiers doesn't correspond to a node inside the
+ /// graph, we write out error message and stop execution with failure code
+ [[nodiscard]] static auto GetArtifactNodes(
+ DependencyGraph const& g,
+ std::vector<ArtifactIdentifier> const& artifact_ids) noexcept
+ -> std::optional<std::vector<DependencyGraph::ArtifactNode const*>> {
+ std::vector<DependencyGraph::ArtifactNode const*> nodes{};
+
+ for (auto const& art_id : artifact_ids) {
+ auto const* node = g.ArtifactNodeWithId(art_id);
+ if (node == nullptr) {
+ Logger::Log(
+ LogLevel::Error, "Artifact {} not found in graph.", art_id);
+ return std::nullopt;
+ }
+ nodes.push_back(node);
+ }
+ return nodes;
+ }
+
+ void LogStatistics() const noexcept {
+ auto const& stats = Statistics::Instance();
+ if (clargs_.rebuild) {
+ std::stringstream ss{};
+ ss << stats.RebuiltActionComparedCounter()
+ << " actions compared with cache";
+ if (stats.ActionsFlakyCounter() > 0) {
+ ss << ", " << stats.ActionsFlakyCounter()
+ << " flaky actions found";
+ ss << " (" << stats.ActionsFlakyTaintedCounter()
+ << " of which tainted)";
+ }
+ if (stats.RebuiltActionMissingCounter() > 0) {
+ ss << ", no cache entry found for "
+ << stats.RebuiltActionMissingCounter() << " actions";
+ }
+ ss << ".";
+ Logger::Log(LogLevel::Info, ss.str());
+ }
+ else {
+ Logger::Log(LogLevel::Info,
+ "Processed {} actions, {} cache hits.",
+ stats.ActionsQueuedCounter(),
+ stats.ActionsCachedCounter());
+ }
+ }
+
+ [[nodiscard]] auto BuildArtifacts(
+ gsl::not_null<DependencyGraph*> const& graph,
+ std::map<std::string, ArtifactDescription> const& artifacts,
+ std::map<std::string, ArtifactDescription> const& runfiles,
+ std::vector<ActionDescription> const& actions,
+ std::vector<Tree> const& trees,
+ std::vector<std::string> const& blobs) const
+ -> std::optional<
+ std::pair<std::vector<std::filesystem::path>,
+ std::vector<DependencyGraph::ArtifactNode const*>>> {
+ if (not UploadBlobs(blobs)) {
+ return std::nullopt;
+ }
+
+ auto artifact_infos =
+ AddArtifactsToRetrieve(graph, artifacts, runfiles);
+ if (not artifact_infos) {
+ return std::nullopt;
+ }
+ auto& [output_paths, artifact_ids] = *artifact_infos;
+
+ std::vector<ActionDescription> tree_actions{};
+ tree_actions.reserve(trees.size());
+ for (auto const& tree : trees) {
+ tree_actions.emplace_back(tree.Action());
+ }
+
+ if (not graph->Add(actions) or not graph->Add(tree_actions)) {
+ Logger::Log(LogLevel::Error, [&actions]() {
+ auto json = nlohmann::json::array();
+ for (auto const& desc : actions) {
+ json.push_back(desc.ToJson());
+ }
+ return fmt::format(
+ "could not build the dependency graph from the actions "
+ "described in {}.",
+ json.dump());
+ });
+ return std::nullopt;
+ }
+
+ if (clargs_.rebuild ? not TraverseRebuild(*graph, artifact_ids)
+ : not Traverse(*graph, artifact_ids)) {
+ Logger::Log(LogLevel::Error, "traversing graph failed.");
+ return std::nullopt;
+ }
+
+ LogStatistics();
+
+ auto artifact_nodes = GetArtifactNodes(*graph, artifact_ids);
+ if (not artifact_nodes) {
+ return std::nullopt;
+ }
+ return std::make_pair(std::move(output_paths),
+ std::move(*artifact_nodes));
+ }
+
+ [[nodiscard]] auto PrepareOutputPaths(
+ std::vector<std::filesystem::path> const& rel_paths) const
+ -> std::optional<std::vector<std::filesystem::path>> {
+ std::vector<std::filesystem::path> output_paths{};
+ output_paths.reserve(rel_paths.size());
+ for (auto const& rel_path : rel_paths) {
+ auto output_path = clargs_.stage->output_dir / rel_path;
+ if (FileSystemManager::IsFile(output_path) and
+ not FileSystemManager::RemoveFile(output_path)) {
+ Logger::Log(LogLevel::Error,
+ "Could not clean output path {}",
+ output_path.string());
+ return std::nullopt;
+ }
+ output_paths.emplace_back(std::move(output_path));
+ }
+ return output_paths;
+ }
+
+ [[nodiscard]] static auto CollectObjectInfos(
+ std::vector<DependencyGraph::ArtifactNode const*> const& artifact_nodes)
+ -> std::optional<std::vector<Artifact::ObjectInfo>> {
+ std::vector<Artifact::ObjectInfo> object_infos;
+ object_infos.reserve(artifact_nodes.size());
+ for (auto const* art_ptr : artifact_nodes) {
+ auto const& info = art_ptr->Content().Info();
+ if (info) {
+ object_infos.push_back(*info);
+ }
+ else {
+ Logger::Log(LogLevel::Error,
+ "artifact {} could not be retrieved, it can not be "
+ "found in CAS.",
+ art_ptr->Content().Id());
+ return std::nullopt;
+ }
+ }
+ return object_infos;
+ }
+
+ /// \brief Asks execution API to copy output artifacts to paths specified by
+ /// command line arguments and writes location info. In case the executor
+ /// couldn't retrieve any of the outputs, execution is terminated.
+ [[nodiscard]] auto RetrieveOutputs(
+ std::vector<std::filesystem::path> const& rel_paths,
+ std::vector<Artifact::ObjectInfo> const& object_infos) const
+ -> std::optional<std::vector<std::filesystem::path>> {
+ // Create output directory
+ if (not FileSystemManager::CreateDirectory(clargs_.stage->output_dir)) {
+ return std::nullopt; // Message logged in the file system manager
+ }
+
+ auto output_paths = PrepareOutputPaths(rel_paths);
+
+ if (not output_paths or
+ not api_->RetrieveToPaths(object_infos, *output_paths)) {
+ Logger::Log(LogLevel::Error, "Could not retrieve outputs.");
+ return std::nullopt;
+ }
+
+ return std::move(*output_paths);
+ }
+
+ void PrintOutputs(
+ std::string message,
+ std::vector<std::filesystem::path> const& paths,
+ std::vector<DependencyGraph::ArtifactNode const*> const& artifact_nodes,
+ std::map<std::string, ArtifactDescription> const& runfiles) const {
+ std::string msg_dbg{"Artifact ids:"};
+ nlohmann::json json{};
+ for (std::size_t pos = 0; pos < paths.size(); ++pos) {
+ auto path = paths[pos].string();
+ auto id = IdentifierToString(artifact_nodes[pos]->Content().Id());
+ if (clargs_.build.show_runfiles or
+ not runfiles.contains(clargs_.stage
+ ? std::filesystem::proximate(
+ path, clargs_.stage->output_dir)
+ .string()
+ : path)) {
+ auto info = artifact_nodes[pos]->Content().Info();
+ if (info) {
+ message += fmt::format("\n {} {}", path, info->ToString());
+ if (clargs_.build.dump_artifacts) {
+ json[path] = info->ToJson();
+ }
+ }
+ else {
+ Logger::Log(
+ LogLevel::Error, "Missing info for artifact {}.", id);
+ }
+ }
+ msg_dbg += fmt::format("\n {}: {}", path, id);
+ }
+
+ if (not clargs_.build.show_runfiles and !runfiles.empty()) {
+ message += fmt::format("\n({} runfiles omitted.)", runfiles.size());
+ }
+
+ Logger::Log(LogLevel::Info, "{}", message);
+ Logger::Log(LogLevel::Debug, "{}", msg_dbg);
+
+ if (clargs_.build.dump_artifacts) {
+ if (*clargs_.build.dump_artifacts == "-") {
+ std::cout << std::setw(2) << json << std::endl;
+ }
+ else {
+ std::ofstream os(*clargs_.build.dump_artifacts);
+ os << std::setw(2) << json << std::endl;
+ }
+ }
+ }
+
+ void MaybePrintToStdout(
+ std::pair<std::vector<std::filesystem::path>,
+ std::vector<DependencyGraph::ArtifactNode const*>> artifacts)
+ const {
+ if (clargs_.build.print_to_stdout) {
+ for (size_t i = 0; i < artifacts.first.size(); i++) {
+ if (artifacts.first[i] == *(clargs_.build.print_to_stdout)) {
+ auto info = artifacts.second[i]->Content().Info();
+ if (info) {
+ if (not api_->RetrieveToFds({*info},
+ {dup(fileno(stdout))})) {
+ Logger::Log(LogLevel::Error,
+ "Failed to retrieve {}",
+ *(clargs_.build.print_to_stdout));
+ }
+ }
+ else {
+ Logger::Log(
+ LogLevel::Error,
+ "Failed to obtain object information for {}",
+ *(clargs_.build.print_to_stdout));
+ }
+ return;
+ }
+ }
+ Logger::Log(LogLevel::Warning,
+ "{} not a logical path of the specified target",
+ *(clargs_.build.print_to_stdout));
+ }
+ }
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_GRAPH_TRAVERSER_GRAPH_TRAVERSER_HPP
diff --git a/src/buildtool/logging/TARGETS b/src/buildtool/logging/TARGETS
new file mode 100644
index 00000000..2a02cd93
--- /dev/null
+++ b/src/buildtool/logging/TARGETS
@@ -0,0 +1,20 @@
+{ "log_level":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["log_level"]
+ , "hdrs": ["log_level.hpp"]
+ , "stage": ["src", "buildtool", "logging"]
+ }
+, "logging":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["logging"]
+ , "hdrs":
+ [ "log_config.hpp"
+ , "log_sink.hpp"
+ , "log_sink_cmdline.hpp"
+ , "log_sink_file.hpp"
+ , "logger.hpp"
+ ]
+ , "deps": ["log_level", ["@", "fmt", "", "fmt"]]
+ , "stage": ["src", "buildtool", "logging"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/logging/log_config.hpp b/src/buildtool/logging/log_config.hpp
new file mode 100644
index 00000000..799a6ad5
--- /dev/null
+++ b/src/buildtool/logging/log_config.hpp
@@ -0,0 +1,69 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_LOGGING_LOG_CONFIG_HPP
+#define INCLUDED_SRC_BUILDTOOL_LOGGING_LOG_CONFIG_HPP
+
+#include <mutex>
+#include <vector>
+
+#include "src/buildtool/logging/log_level.hpp"
+#include "src/buildtool/logging/log_sink.hpp"
+
+/// \brief Global static logging configuration.
+/// The entire class is thread-safe.
+class LogConfig {
+ public:
+ /// \brief Set the log limit.
+ static void SetLogLimit(LogLevel level) noexcept { log_limit_ = level; }
+
+ /// \brief Replace all configured sinks.
+ /// NOTE: Reinitializes all internal factories.
+ static void SetSinks(std::vector<LogSinkFactory>&& factories) noexcept {
+ std::lock_guard lock{mutex_};
+ sinks_.clear();
+ sinks_.reserve(factories.size());
+ std::transform(factories.cbegin(),
+ factories.cend(),
+ std::back_inserter(sinks_),
+ [](auto& f) { return f(); });
+ factories_ = std::move(factories);
+ }
+
+ /// \brief Add new a new sink.
+ static void AddSink(LogSinkFactory&& factory) noexcept {
+ std::lock_guard lock{mutex_};
+ sinks_.push_back(factory());
+ factories_.push_back(std::move(factory));
+ }
+
+ /// \brief Get the currently configured log limit.
+ [[nodiscard]] static auto LogLimit() noexcept -> LogLevel {
+ return log_limit_;
+ }
+
+ /// \brief Get sink instances for all configured sink factories.
+ /// Returns a const copy of shared_ptrs, so accessing the sinks in the
+ /// calling context is thread-safe.
+ // NOLINTNEXTLINE(readability-const-return-type)
+ [[nodiscard]] static auto Sinks() noexcept
+ -> std::vector<ILogSink::Ptr> const {
+ std::lock_guard lock{mutex_};
+ return sinks_;
+ }
+
+ /// \brief Get all configured sink factories.
+ /// Returns a const copy of shared_ptrs, so accessing the factories in the
+ /// calling context is thread-safe.
+ // NOLINTNEXTLINE(readability-const-return-type)
+ [[nodiscard]] static auto SinkFactories() noexcept
+ -> std::vector<LogSinkFactory> const {
+ std::lock_guard lock{mutex_};
+ return factories_;
+ }
+
+ private:
+ static inline std::mutex mutex_{};
+ static inline LogLevel log_limit_{LogLevel::Info};
+ static inline std::vector<ILogSink::Ptr> sinks_{};
+ static inline std::vector<LogSinkFactory> factories_{};
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_LOGGING_LOG_CONFIG_HPP
diff --git a/src/buildtool/logging/log_level.hpp b/src/buildtool/logging/log_level.hpp
new file mode 100644
index 00000000..6847e69c
--- /dev/null
+++ b/src/buildtool/logging/log_level.hpp
@@ -0,0 +1,41 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_LOGGING_LOG_LEVEL_HPP
+#define INCLUDED_SRC_BUILDTOOL_LOGGING_LOG_LEVEL_HPP
+
+#include <algorithm>
+#include <string>
+#include <type_traits>
+
+enum class LogLevel {
+ Error, ///< Error messages, fatal errors
+ Warning, ///< Warning messages, recoverable situations that shouldn't occur
+ Info, ///< Informative messages, such as reporting status or statistics
+ Debug, ///< Debug messages, such as details from internal processes
+ Trace ///< Trace messages, verbose details such as function calls
+};
+
+constexpr auto kFirstLogLevel = LogLevel::Error;
+constexpr auto kLastLogLevel = LogLevel::Trace;
+
+[[nodiscard]] static inline auto ToLogLevel(
+ std::underlying_type_t<LogLevel> level) -> LogLevel {
+ return std::min(std::max(static_cast<LogLevel>(level), kFirstLogLevel),
+ kLastLogLevel);
+}
+
+[[nodiscard]] static inline auto LogLevelToString(LogLevel level)
+ -> std::string {
+ switch (level) {
+ case LogLevel::Error:
+ return "ERROR";
+ case LogLevel::Warning:
+ return "WARN";
+ case LogLevel::Info:
+ return "INFO";
+ case LogLevel::Debug:
+ return "DEBUG";
+ case LogLevel::Trace:
+ return "TRACE";
+ }
+}
+
+#endif // INCLUDED_SRC_BUILDTOOL_LOGGING_LOG_LEVEL_HPP
diff --git a/src/buildtool/logging/log_sink.hpp b/src/buildtool/logging/log_sink.hpp
new file mode 100644
index 00000000..3c1028cd
--- /dev/null
+++ b/src/buildtool/logging/log_sink.hpp
@@ -0,0 +1,41 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_LOGGING_LOG_SINK_HPP
+#define INCLUDED_SRC_BUILDTOOL_LOGGING_LOG_SINK_HPP
+
+#include <functional>
+#include <istream>
+#include <memory>
+#include <string>
+
+#include "src/buildtool/logging/log_level.hpp"
+
+// forward declaration
+class Logger;
+
+class ILogSink {
+ public:
+ using Ptr = std::shared_ptr<ILogSink>;
+ ILogSink() noexcept = default;
+ ILogSink(ILogSink const&) = delete;
+ ILogSink(ILogSink&&) = delete;
+ auto operator=(ILogSink const&) -> ILogSink& = delete;
+ auto operator=(ILogSink &&) -> ILogSink& = delete;
+ virtual ~ILogSink() noexcept = default;
+
+ /// \brief Thread-safe emitting of log messages.
+ /// Logger might be 'nullptr' if called from the global context.
+ virtual void Emit(Logger const* logger,
+ LogLevel level,
+ std::string const& msg) const noexcept = 0;
+
+ protected:
+ /// \brief Helper class for line iteration with std::istream_iterator.
+ class Line : public std::string {
+ friend auto operator>>(std::istream& is, Line& line) -> std::istream& {
+ return std::getline(is, line);
+ }
+ };
+};
+
+using LogSinkFactory = std::function<ILogSink::Ptr()>;
+
+#endif // INCLUDED_SRC_BUILDTOOL_LOGGING_LOG_SINK_HPP
diff --git a/src/buildtool/logging/log_sink_cmdline.hpp b/src/buildtool/logging/log_sink_cmdline.hpp
new file mode 100644
index 00000000..f7e5f915
--- /dev/null
+++ b/src/buildtool/logging/log_sink_cmdline.hpp
@@ -0,0 +1,93 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_LOGGING_LOG_SINK_CMDLINE_HPP
+#define INCLUDED_SRC_BUILDTOOL_LOGGING_LOG_SINK_CMDLINE_HPP
+
+#include <iterator>
+#include <memory>
+#include <mutex>
+#include <sstream>
+#include <string>
+
+#include "fmt/color.h"
+#include "fmt/core.h"
+#include "src/buildtool/logging/log_sink.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+class LogSinkCmdLine final : public ILogSink {
+ public:
+ static auto CreateFactory(bool colored = true) -> LogSinkFactory {
+ return [=]() { return std::make_shared<LogSinkCmdLine>(colored); };
+ }
+
+ explicit LogSinkCmdLine(bool colored) noexcept : colored_{colored} {}
+ ~LogSinkCmdLine() noexcept final = default;
+ LogSinkCmdLine(LogSinkCmdLine const&) noexcept = delete;
+ LogSinkCmdLine(LogSinkCmdLine&&) noexcept = delete;
+ auto operator=(LogSinkCmdLine const&) noexcept -> LogSinkCmdLine& = delete;
+ auto operator=(LogSinkCmdLine&&) noexcept -> LogSinkCmdLine& = delete;
+
+ /// \brief Thread-safe emitting of log messages to stderr.
+ void Emit(Logger const* logger,
+ LogLevel level,
+ std::string const& msg) const noexcept final {
+ auto prefix = LogLevelToString(level);
+
+ if (logger != nullptr) {
+ // append logger name
+ prefix = fmt::format("{} ({})", prefix, logger->Name());
+ }
+ prefix = prefix + ":";
+ auto cont_prefix = std::string(prefix.size(), ' ');
+ prefix = FormatPrefix(level, prefix);
+ bool msg_on_continuation{false};
+ if (logger != nullptr and msg.find('\n') != std::string::npos) {
+ cont_prefix = " ";
+ msg_on_continuation = true;
+ }
+
+ {
+ std::lock_guard lock{mutex_};
+ if (msg_on_continuation) {
+ fmt::print(stderr, "{}\n", prefix);
+ prefix = cont_prefix;
+ }
+ using it = std::istream_iterator<ILogSink::Line>;
+ std::istringstream iss{msg};
+ for_each(it{iss}, it{}, [&](auto const& line) {
+ fmt::print(stderr, "{} {}\n", prefix, line);
+ prefix = cont_prefix;
+ });
+ }
+ }
+
+ private:
+ bool colored_{};
+ static inline std::mutex mutex_{};
+
+ [[nodiscard]] auto FormatPrefix(LogLevel level,
+ std::string const& prefix) const noexcept
+ -> std::string {
+ fmt::text_style style{};
+ if (colored_) {
+ switch (level) {
+ case LogLevel::Error:
+ style = fg(fmt::color::red);
+ break;
+ case LogLevel::Warning:
+ style = fg(fmt::color::orange);
+ break;
+ case LogLevel::Info:
+ style = fg(fmt::color::lime_green);
+ break;
+ case LogLevel::Debug:
+ style = fg(fmt::color::yellow);
+ break;
+ case LogLevel::Trace:
+ style = fg(fmt::color::light_sky_blue);
+ break;
+ }
+ }
+ return fmt::format(style, "{}", prefix);
+ }
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_LOGGING_LOG_SINK_CMDLINE_HPP
diff --git a/src/buildtool/logging/log_sink_file.hpp b/src/buildtool/logging/log_sink_file.hpp
new file mode 100644
index 00000000..2ca1b75b
--- /dev/null
+++ b/src/buildtool/logging/log_sink_file.hpp
@@ -0,0 +1,129 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_LOGGING_LOG_SINK_FILE_HPP
+#define INCLUDED_SRC_BUILDTOOL_LOGGING_LOG_SINK_FILE_HPP
+
+#include <cstdio>
+#include <filesystem>
+#include <functional>
+#include <iterator>
+#include <memory>
+#include <mutex>
+#include <sstream>
+#include <string>
+#include <unordered_map>
+
+#ifdef __unix__
+#include <sys/time.h>
+#endif
+
+#include "fmt/chrono.h"
+#include "fmt/core.h"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/logging/log_sink.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+/// \brief Thread-safe map of mutexes.
+template <class T_Key>
+class MutexMap {
+ public:
+ /// \brief Create mutex for key and run callback if successfully created.
+ /// Callback is executed while the internal map is still held exclusively.
+ void Create(T_Key const& key, std::function<void()> const& callback) {
+ std::lock_guard lock(mutex_);
+ if (not map_.contains(key)) {
+ [[maybe_unused]] auto& mutex = map_[key];
+ callback();
+ }
+ }
+ /// \brief Get mutex for key, creates mutex if key does not exist.
+ [[nodiscard]] auto Get(T_Key const& key) noexcept -> std::mutex& {
+ std::lock_guard lock(mutex_);
+ return map_[key];
+ }
+
+ private:
+ std::mutex mutex_{};
+ std::unordered_map<T_Key, std::mutex> map_{};
+};
+
+class LogSinkFile final : public ILogSink {
+ public:
+ enum class Mode {
+ Append, ///< Append if log file already exists.
+ Overwrite ///< Overwrite log file with each new program instantiation.
+ };
+
+ static auto CreateFactory(std::filesystem::path const& file_path,
+ Mode file_mode = Mode::Append) -> LogSinkFactory {
+ return
+ [=] { return std::make_shared<LogSinkFile>(file_path, file_mode); };
+ }
+
+ LogSinkFile(std::filesystem::path const& file_path, Mode file_mode)
+ : file_path_{std::filesystem::weakly_canonical(file_path).string()} {
+ // create file mutex for canonical path
+ file_mutexes_.Create(file_path_, [&] {
+ if (file_mode == Mode::Overwrite) {
+ // clear file contents
+ if (gsl::owner<FILE*> file =
+ std::fopen(file_path_.c_str(), "w")) {
+ std::fclose(file);
+ }
+ }
+ });
+ }
+ ~LogSinkFile() noexcept final = default;
+ LogSinkFile(LogSinkFile const&) noexcept = delete;
+ LogSinkFile(LogSinkFile&&) noexcept = delete;
+ auto operator=(LogSinkFile const&) noexcept -> LogSinkFile& = delete;
+ auto operator=(LogSinkFile&&) noexcept -> LogSinkFile& = delete;
+
+ /// \brief Thread-safe emitting of log messages to file.
+ /// Race-conditions for file writes are resolved via a separate mutexes for
+ /// every canonical file path shared across all instances of this class.
+ void Emit(Logger const* logger,
+ LogLevel level,
+ std::string const& msg) const noexcept final {
+#ifdef __unix__ // support nanoseconds for timestamp
+ timespec ts{};
+ clock_gettime(CLOCK_REALTIME, &ts);
+ auto timestamp = fmt::format(
+ "{:%Y-%m-%d %H:%M:%S}.{}", fmt::localtime(ts.tv_sec), ts.tv_nsec);
+#else
+ auto timestamp = fmt::format(
+ "{:%Y-%m-%d %H:%M:%S}", fmt::localtime(std::time(nullptr));
+#endif
+
+ std::ostringstream id{};
+ id << "thread:" << std::this_thread::get_id();
+ auto thread = id.str();
+
+ auto prefix = fmt::format(
+ "{}, [{}] {}", thread, timestamp, LogLevelToString(level));
+
+ if (logger != nullptr) {
+ // append logger name
+ prefix = fmt::format("{} ({})", prefix, logger->Name());
+ }
+ prefix = fmt::format("{}:", prefix);
+ auto cont_prefix = std::string(prefix.size(), ' ');
+
+ {
+ std::lock_guard lock{file_mutexes_.Get(file_path_)};
+ if (gsl::owner<FILE*> file = std::fopen(file_path_.c_str(), "a")) {
+ using it = std::istream_iterator<ILogSink::Line>;
+ std::istringstream iss{msg};
+ for_each(it{iss}, it{}, [&](auto const& line) {
+ fmt::print(file, "{} {}\n", prefix, line);
+ prefix = cont_prefix;
+ });
+ std::fclose(file);
+ }
+ }
+ }
+
+ private:
+ std::string file_path_{};
+ static inline MutexMap<std::string> file_mutexes_{};
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_LOGGING_LOG_SINK_FILE_HPP
diff --git a/src/buildtool/logging/logger.hpp b/src/buildtool/logging/logger.hpp
new file mode 100644
index 00000000..60742607
--- /dev/null
+++ b/src/buildtool/logging/logger.hpp
@@ -0,0 +1,123 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_LOGGING_LOGGER_HPP
+#define INCLUDED_SRC_BUILDTOOL_LOGGING_LOGGER_HPP
+
+#include <functional>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "fmt/core.h"
+#include "src/buildtool/logging/log_config.hpp"
+#include "src/buildtool/logging/log_sink.hpp"
+
+class Logger {
+ public:
+ using MessageCreateFunc = std::function<std::string()>;
+
+ /// \brief Create logger with sink instances from LogConfig::Sinks().
+ explicit Logger(std::string name) noexcept
+ : name_{std::move(name)},
+ log_limit_{LogConfig::LogLimit()},
+ sinks_{LogConfig::Sinks()} {}
+
+ /// \brief Create logger with new sink instances from specified factories.
+ Logger(std::string name,
+ std::vector<LogSinkFactory> const& factories) noexcept
+ : name_{std::move(name)}, log_limit_{LogConfig::LogLimit()} {
+ sinks_.reserve(factories.size());
+ std::transform(factories.cbegin(),
+ factories.cend(),
+ std::back_inserter(sinks_),
+ [](auto& f) { return f(); });
+ }
+
+ ~Logger() noexcept = default;
+ Logger(Logger const&) noexcept = delete;
+ Logger(Logger&&) noexcept = delete;
+ auto operator=(Logger const&) noexcept -> Logger& = delete;
+ auto operator=(Logger&&) noexcept -> Logger& = delete;
+
+ /// \brief Get logger name.
+ [[nodiscard]] auto Name() const& noexcept -> std::string const& {
+ return name_;
+ }
+
+ /// \brief Get log limit.
+ [[nodiscard]] auto LogLimit() const noexcept -> LogLevel {
+ return log_limit_;
+ }
+
+ /// \brief Set log limit.
+ void SetLogLimit(LogLevel level) noexcept { log_limit_ = level; }
+
+ /// \brief Emit log message from string via this logger instance.
+ template <class... T_Args>
+ void Emit(LogLevel level,
+ std::string const& msg,
+ T_Args&&... args) const noexcept {
+ if (static_cast<int>(level) <= static_cast<int>(log_limit_)) {
+ FormatAndForward(
+ this, sinks_, level, msg, std::forward<T_Args>(args)...);
+ }
+ }
+
+ /// \brief Emit log message from lambda via this logger instance.
+ void Emit(LogLevel level,
+ MessageCreateFunc const& msg_creator) const noexcept {
+ if (static_cast<int>(level) <= static_cast<int>(log_limit_)) {
+ FormatAndForward(this, sinks_, level, msg_creator());
+ }
+ }
+
+ /// \brief Log message from string via LogConfig's sinks and log limit.
+ template <class... T_Args>
+ static void Log(LogLevel level,
+ std::string const& msg,
+ T_Args&&... args) noexcept {
+ if (static_cast<int>(level) <=
+ static_cast<int>(LogConfig::LogLimit())) {
+ FormatAndForward(nullptr,
+ LogConfig::Sinks(),
+ level,
+ msg,
+ std::forward<T_Args>(args)...);
+ }
+ }
+
+ /// \brief Log message from lambda via LogConfig's sinks and log limit.
+ static void Log(LogLevel level,
+ MessageCreateFunc const& msg_creator) noexcept {
+ if (static_cast<int>(level) <=
+ static_cast<int>(LogConfig::LogLimit())) {
+ FormatAndForward(nullptr, LogConfig::Sinks(), level, msg_creator());
+ }
+ }
+
+ private:
+ std::string name_{};
+ LogLevel log_limit_{};
+ std::vector<ILogSink::Ptr> sinks_{};
+
+ /// \brief Format message and forward to sinks.
+ template <class... T_Args>
+ static void FormatAndForward(Logger const* logger,
+ std::vector<ILogSink::Ptr> const& sinks,
+ LogLevel level,
+ std::string const& msg,
+ T_Args&&... args) noexcept {
+ if constexpr (sizeof...(T_Args) == 0) {
+ // forward to sinks
+ std::for_each(sinks.cbegin(), sinks.cend(), [&](auto& sink) {
+ sink->Emit(logger, level, msg);
+ });
+ }
+ else {
+ // format the message
+ auto fmsg = fmt::format(msg, std::forward<T_Args>(args)...);
+ // recursive call without format arguments
+ FormatAndForward(logger, sinks, level, fmsg);
+ }
+ }
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_LOGGING_LOGGER_HPP
diff --git a/src/buildtool/main/TARGETS b/src/buildtool/main/TARGETS
new file mode 100644
index 00000000..ad4ab98a
--- /dev/null
+++ b/src/buildtool/main/TARGETS
@@ -0,0 +1,21 @@
+{ "just":
+ { "type": ["@", "rules", "CC", "binary"]
+ , "name": ["just"]
+ , "srcs": ["main.cpp"]
+ , "private-hdrs": ["main.hpp"]
+ , "deps":
+ [ ["src/buildtool/common", "cli"]
+ , ["src/buildtool/common", "config"]
+ , ["src/buildtool/graph_traverser", "graph_traverser"]
+ , ["src/buildtool/logging", "logging"]
+ , ["src/buildtool/build_engine/base_maps", "directory_map"]
+ , ["src/buildtool/build_engine/base_maps", "rule_map"]
+ , ["src/buildtool/build_engine/base_maps", "source_map"]
+ , ["src/buildtool/build_engine/base_maps", "targets_file_map"]
+ , ["src/buildtool/build_engine/target_map", "target_map"]
+ , ["src/utils/cpp", "concepts"]
+ , ["src/utils/cpp", "json"]
+ ]
+ , "stage": ["src", "buildtool", "main"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/main/main.cpp b/src/buildtool/main/main.cpp
new file mode 100644
index 00000000..74d75d56
--- /dev/null
+++ b/src/buildtool/main/main.cpp
@@ -0,0 +1,1292 @@
+#include "src/buildtool/main/main.hpp"
+
+#include <algorithm>
+#include <cstdlib>
+#include <filesystem>
+#include <fstream>
+#include <iostream>
+#include <string>
+
+#include "src/buildtool/build_engine/base_maps/directory_map.hpp"
+#include "src/buildtool/build_engine/base_maps/entity_name.hpp"
+#include "src/buildtool/build_engine/base_maps/expression_map.hpp"
+#include "src/buildtool/build_engine/base_maps/rule_map.hpp"
+#include "src/buildtool/build_engine/base_maps/source_map.hpp"
+#include "src/buildtool/build_engine/base_maps/targets_file_map.hpp"
+#include "src/buildtool/build_engine/expression/expression.hpp"
+#include "src/buildtool/build_engine/target_map/target_map.hpp"
+#include "src/buildtool/common/artifact_description.hpp"
+#include "src/buildtool/common/cli.hpp"
+#include "src/buildtool/common/repository_config.hpp"
+#ifndef BOOTSTRAP_BUILD_TOOL
+#include "src/buildtool/graph_traverser/graph_traverser.hpp"
+#endif
+#include "src/buildtool/logging/log_config.hpp"
+#include "src/buildtool/logging/log_sink_cmdline.hpp"
+#include "src/buildtool/logging/log_sink_file.hpp"
+#include "src/buildtool/multithreading/async_map_consumer.hpp"
+#include "src/buildtool/multithreading/task_system.hpp"
+#include "src/utils/cpp/concepts.hpp"
+#include "src/utils/cpp/json.hpp"
+
+namespace {
+
+namespace Base = BuildMaps::Base;
+namespace Target = BuildMaps::Target;
+
+enum class SubCommand {
+ kUnknown,
+ kDescribe,
+ kAnalyse,
+ kBuild,
+ kInstall,
+ kRebuild,
+ kInstallCas,
+ kTraverse
+};
+
+struct CommandLineArguments {
+ SubCommand cmd{SubCommand::kUnknown};
+ CommonArguments common;
+ AnalysisArguments analysis;
+ DiagnosticArguments diagnose;
+ EndpointArguments endpoint;
+ BuildArguments build;
+ StageArguments stage;
+ RebuildArguments rebuild;
+ FetchArguments fetch;
+ GraphArguments graph;
+};
+
+/// \brief Setup arguments for sub command "just describe".
+auto SetupDescribeCommandArguments(
+ gsl::not_null<CLI::App*> const& app,
+ gsl::not_null<CommandLineArguments*> const& clargs) {
+ SetupCommonArguments(app, &clargs->common);
+ SetupAnalysisArguments(app, &clargs->analysis, false);
+}
+
+/// \brief Setup arguments for sub command "just analyse".
+auto SetupAnalyseCommandArguments(
+ gsl::not_null<CLI::App*> const& app,
+ gsl::not_null<CommandLineArguments*> const& clargs) {
+ SetupCommonArguments(app, &clargs->common);
+ SetupAnalysisArguments(app, &clargs->analysis);
+ SetupDiagnosticArguments(app, &clargs->diagnose);
+}
+
+/// \brief Setup arguments for sub command "just build".
+auto SetupBuildCommandArguments(
+ gsl::not_null<CLI::App*> const& app,
+ gsl::not_null<CommandLineArguments*> const& clargs) {
+ SetupCommonArguments(app, &clargs->common);
+ SetupAnalysisArguments(app, &clargs->analysis);
+ SetupEndpointArguments(app, &clargs->endpoint);
+ SetupBuildArguments(app, &clargs->build);
+}
+
+/// \brief Setup arguments for sub command "just install".
+auto SetupInstallCommandArguments(
+ gsl::not_null<CLI::App*> const& app,
+ gsl::not_null<CommandLineArguments*> const& clargs) {
+ SetupBuildCommandArguments(app, clargs); // same as build
+ SetupStageArguments(app, &clargs->stage); // plus stage
+}
+
+/// \brief Setup arguments for sub command "just rebuild".
+auto SetupRebuildCommandArguments(
+ gsl::not_null<CLI::App*> const& app,
+ gsl::not_null<CommandLineArguments*> const& clargs) {
+ SetupBuildCommandArguments(app, clargs); // same as build
+ SetupRebuildArguments(app, &clargs->rebuild); // plus rebuild
+}
+
+/// \brief Setup arguments for sub command "just install-cas".
+auto SetupInstallCasCommandArguments(
+ gsl::not_null<CLI::App*> const& app,
+ gsl::not_null<CommandLineArguments*> const& clargs) {
+ SetupEndpointArguments(app, &clargs->endpoint);
+ SetupFetchArguments(app, &clargs->fetch);
+}
+
+/// \brief Setup arguments for sub command "just traverse".
+auto SetupTraverseCommandArguments(
+ gsl::not_null<CLI::App*> const& app,
+ gsl::not_null<CommandLineArguments*> const& clargs) {
+ SetupCommonArguments(app, &clargs->common);
+ SetupEndpointArguments(app, &clargs->endpoint);
+ SetupGraphArguments(app, &clargs->graph); // instead of analysis
+ SetupBuildArguments(app, &clargs->build);
+ SetupStageArguments(app, &clargs->stage);
+}
+
+auto ParseCommandLineArguments(int argc, char const* const* argv)
+ -> CommandLineArguments {
+ CLI::App app("just");
+ app.option_defaults()->take_last();
+
+ auto* cmd_describe = app.add_subcommand(
+ "describe", "Describe the rule generating a target.");
+ auto* cmd_analyse =
+ app.add_subcommand("analyse", "Analyse specified targets.");
+ auto* cmd_build = app.add_subcommand("build", "Build specified targets.");
+ auto* cmd_install =
+ app.add_subcommand("install", "Build and stage specified targets.");
+ auto* cmd_rebuild = app.add_subcommand(
+ "rebuild", "Rebuild and compare artifacts to cached build.");
+ auto* cmd_install_cas =
+ app.add_subcommand("install-cas", "Fetch and stage artifact from CAS.");
+ auto* cmd_traverse =
+ app.group("") // group for creating hidden options
+ ->add_subcommand("traverse",
+ "Build and stage artifacts from graph file.");
+ app.require_subcommand(1);
+
+ CommandLineArguments clargs;
+ SetupDescribeCommandArguments(cmd_describe, &clargs);
+ SetupAnalyseCommandArguments(cmd_analyse, &clargs);
+ SetupBuildCommandArguments(cmd_build, &clargs);
+ SetupInstallCommandArguments(cmd_install, &clargs);
+ SetupRebuildCommandArguments(cmd_rebuild, &clargs);
+ SetupInstallCasCommandArguments(cmd_install_cas, &clargs);
+ SetupTraverseCommandArguments(cmd_traverse, &clargs);
+
+ try {
+ app.parse(argc, argv);
+ } catch (CLI::Error& e) {
+ std::exit(app.exit(e));
+ }
+
+ if (*cmd_describe) {
+ clargs.cmd = SubCommand::kDescribe;
+ }
+ else if (*cmd_analyse) {
+ clargs.cmd = SubCommand::kAnalyse;
+ }
+ else if (*cmd_build) {
+ clargs.cmd = SubCommand::kBuild;
+ }
+ else if (*cmd_install) {
+ clargs.cmd = SubCommand::kInstall;
+ }
+ else if (*cmd_rebuild) {
+ clargs.cmd = SubCommand::kRebuild;
+ }
+ else if (*cmd_install_cas) {
+ clargs.cmd = SubCommand::kInstallCas;
+ }
+ else if (*cmd_traverse) {
+ clargs.cmd = SubCommand::kTraverse;
+ }
+
+ return clargs;
+}
+
+void SetupLogging(CommonArguments const& clargs) {
+ LogConfig::SetLogLimit(clargs.log_limit);
+ LogConfig::SetSinks({LogSinkCmdLine::CreateFactory()});
+ if (clargs.log_file) {
+ LogConfig::AddSink(LogSinkFile::CreateFactory(
+ *clargs.log_file, LogSinkFile::Mode::Overwrite));
+ }
+}
+
+#ifndef BOOTSTRAP_BUILD_TOOL
+void SetupLocalExecution(EndpointArguments const& eargs,
+ BuildArguments const& bargs) {
+ using LocalConfig = LocalExecutionConfig;
+ if (not LocalConfig::SetKeepBuildDir(bargs.persistent_build_dir) or
+ not(not eargs.local_root or
+ (LocalConfig::SetBuildRoot(*eargs.local_root) and
+ LocalConfig::SetDiskCache(*eargs.local_root))) or
+ not(not bargs.local_launcher or
+ LocalConfig::SetLauncher(*bargs.local_launcher))) {
+ Logger::Log(LogLevel::Error, "failed to configure local execution.");
+ }
+}
+#endif
+
+// returns path relative to `root`.
+[[nodiscard]] auto FindRoot(std::filesystem::path const& subdir,
+ FileRoot const& root,
+ std::vector<std::string> const& markers)
+ -> std::optional<std::filesystem::path> {
+ gsl_Expects(subdir.is_relative());
+ auto current = subdir;
+ while (true) {
+ for (auto const& marker : markers) {
+ if (root.Exists(current / marker)) {
+ return current;
+ }
+ }
+ if (current.empty()) {
+ break;
+ }
+ current = current.parent_path();
+ }
+ return std::nullopt;
+}
+
+[[nodiscard]] auto ReadConfiguration(AnalysisArguments const& clargs) noexcept
+ -> Configuration {
+ if (not clargs.config_file.empty()) {
+ if (not std::filesystem::exists(clargs.config_file)) {
+ Logger::Log(LogLevel::Error,
+ "Config file {} does not exist.",
+ clargs.config_file.string());
+ std::exit(kExitFailure);
+ }
+ try {
+ std::ifstream fs(clargs.config_file);
+ auto map = Expression::FromJson(nlohmann::json::parse(fs));
+ if (not map->IsMap()) {
+ Logger::Log(LogLevel::Error,
+ "Config file {} does not contain a map.",
+ clargs.config_file.string());
+ std::exit(kExitFailure);
+ }
+ return Configuration{map};
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error,
+ "Parsing config file {} failed with error:\n{}",
+ clargs.config_file.string(),
+ e.what());
+ std::exit(kExitFailure);
+ }
+ }
+ return Configuration{};
+}
+
+[[nodiscard]] auto DetermineCurrentModule(
+ std::filesystem::path const& workspace_root,
+ FileRoot const& target_root,
+ std::optional<std::string> const& target_file_name_opt) -> std::string {
+ auto cwd = std::filesystem::current_path();
+ auto subdir = std::filesystem::proximate(cwd, workspace_root);
+ if (subdir.is_relative() and (*subdir.begin() != "..")) {
+ // cwd is subdir of workspace_root
+ std::string target_file_name =
+ target_file_name_opt ? *target_file_name_opt : "TARGETS";
+ if (auto root_dir = FindRoot(subdir, target_root, {target_file_name})) {
+ return root_dir->string();
+ }
+ }
+ return ".";
+}
+
+[[nodiscard]] auto ReadConfiguredTarget(
+ AnalysisArguments const& clargs,
+ std::string const& main_repo,
+ std::optional<std::filesystem::path> const& main_ws_root)
+ -> Target::ConfiguredTarget {
+ auto const* target_root =
+ RepositoryConfig::Instance().TargetRoot(main_repo);
+ if (target_root == nullptr) {
+ Logger::Log(LogLevel::Error,
+ "Cannot obtain target root for main repo {}.",
+ main_repo);
+ std::exit(kExitFailure);
+ }
+ auto current_module = std::string{"."};
+ if (main_ws_root) {
+ // module detection only works if main workspace is on the file system
+ current_module = DetermineCurrentModule(
+ *main_ws_root, *target_root, clargs.target_file_name);
+ }
+ auto config = ReadConfiguration(clargs);
+ if (clargs.target) {
+ auto entity = Base::ParseEntityNameFromJson(
+ *clargs.target,
+ Base::EntityName{main_repo, current_module, ""},
+ [&clargs](std::string const& parse_err) {
+ Logger::Log(LogLevel::Error,
+ "Parsing target name {} failed with:\n{}.",
+ clargs.target->dump(),
+ parse_err);
+ });
+ if (not entity) {
+ std::exit(kExitFailure);
+ }
+ return Target::ConfiguredTarget{std::move(*entity), std::move(config)};
+ }
+ std::string target_file_name =
+ clargs.target_file_name ? *clargs.target_file_name : "TARGETS";
+ auto const target_file =
+ (std::filesystem::path{current_module} / target_file_name).string();
+ auto file_content = target_root->ReadFile(target_file);
+ if (not file_content) {
+ Logger::Log(LogLevel::Error, "Cannot read file {}.", target_file);
+ std::exit(kExitFailure);
+ }
+ auto const json = nlohmann::json::parse(*file_content);
+ if (not json.is_object()) {
+ Logger::Log(
+ LogLevel::Error, "Invalid content in target file {}.", target_file);
+ std::exit(kExitFailure);
+ }
+ if (json.empty()) {
+ Logger::Log(LogLevel::Error,
+ "Missing target descriptions in file {}.",
+ target_file);
+ std::exit(kExitFailure);
+ }
+ return Target::ConfiguredTarget{
+ Base::EntityName{main_repo, current_module, json.begin().key()},
+ std::move(config)};
+}
+
+template <HasToString K, typename V>
+[[nodiscard]] auto DetectAndReportCycle(std::string const& name,
+ AsyncMapConsumer<K, V> const& map)
+ -> bool {
+ using namespace std::string_literals;
+ auto cycle = map.DetectCycle();
+ if (cycle) {
+ bool found{false};
+ std::ostringstream oss{};
+ oss << fmt::format("Cycle detected in {}:", name) << std::endl;
+ for (auto const& k : *cycle) {
+ auto match = (k == cycle->back());
+ auto prefix{match ? found ? "`-- "s : ".-> "s
+ : found ? "| "s : " "s};
+ oss << prefix << k.ToString() << std::endl;
+ found = found or match;
+ }
+ Logger::Log(LogLevel::Error, "{}", oss.str());
+ return true;
+ }
+ return false;
+}
+
+template <HasToString K, typename V>
+void DetectAndReportPending(std::string const& name,
+ AsyncMapConsumer<K, V> const& map) {
+ using namespace std::string_literals;
+ auto keys = map.GetPendingKeys();
+ if (not keys.empty()) {
+ std::ostringstream oss{};
+ oss << fmt::format("Internal error, failed to evaluate pending {}:",
+ name)
+ << std::endl;
+ for (auto const& k : keys) {
+ oss << " " << k.ToString() << std::endl;
+ }
+ Logger::Log(LogLevel::Error, "{}", oss.str());
+ }
+}
+
+std::vector<std::string> const kRootMarkers{"ROOT", "WORKSPACE", ".git"};
+
+[[nodiscard]] auto DetermineWorkspaceRoot(CommonArguments const& clargs)
+ -> std::filesystem::path {
+ if (clargs.workspace_root) {
+ return *clargs.workspace_root;
+ }
+ auto cwd = std::filesystem::current_path();
+ auto root = cwd.root_path();
+ cwd = std::filesystem::relative(cwd, root);
+ auto root_dir = FindRoot(cwd, FileRoot{root}, kRootMarkers);
+ if (not root_dir) {
+ Logger::Log(LogLevel::Error, "Could not determine workspace root.");
+ std::exit(kExitFailure);
+ }
+ return root / *root_dir;
+}
+
+// returns FileRoot and optional local path, if the root is local
+auto ParseRoot(nlohmann::json desc,
+ const std::string& repo,
+ const std::string& keyword)
+ -> std::pair<FileRoot, std::optional<std::filesystem::path>> {
+ nlohmann::json root = desc[keyword];
+ if ((not root.is_array()) or root.empty()) {
+ Logger::Log(LogLevel::Error,
+ "Expected {} for {} to be of the form [<scheme>, ...], but "
+ "found {}",
+ keyword,
+ repo,
+ root.dump());
+ std::exit(kExitFailure);
+ }
+ if (root[0] == "file") {
+ if (root.size() != 2 or (not root[1].is_string())) {
+ Logger::Log(LogLevel::Error,
+ "\"file\" scheme expects precisely one string "
+ "argument, but found {} for {} of repository {}",
+ root.dump(),
+ keyword,
+ repo);
+ std::exit(kExitFailure);
+ }
+ auto path = std::filesystem::path{root[1]};
+ return {FileRoot{path}, std::move(path)};
+ }
+ if (root[0] == "git tree") {
+ if (root.size() != 3 or (not root[1].is_string()) or
+ (not root[2].is_string())) {
+ Logger::Log(LogLevel::Error,
+ "\"git tree\" scheme expects two string arguments, "
+ "but found {} for {} of repository {}",
+ root.dump(),
+ keyword,
+ repo);
+ std::exit(kExitFailure);
+ }
+ if (auto git_root = FileRoot::FromGit(root[2], root[1])) {
+ return {std::move(*git_root), std::nullopt};
+ }
+ Logger::Log(LogLevel::Error,
+ "Could not create file root for git repository {} and tree "
+ "id {}",
+ root[2],
+ root[1]);
+ std::exit(kExitFailure);
+ }
+ Logger::Log(LogLevel::Error,
+ "Unknown scheme in the specification {} of {} of repository {}",
+ root.dump(),
+ keyword,
+ repo);
+ std::exit(kExitFailure);
+}
+
+// Set all roots and name mappings from the command-line arguments and
+// return the name of the main repository and main workspace path if local.
+auto DetermineRoots(CommonArguments cargs, AnalysisArguments aargs)
+ -> std::pair<std::string, std::optional<std::filesystem::path>> {
+ std::optional<std::filesystem::path> main_ws_root;
+ auto repo_config = nlohmann::json::object();
+ if (cargs.repository_config) {
+ try {
+ std::ifstream fs(*cargs.repository_config);
+ repo_config = nlohmann::json::parse(fs);
+ if (not repo_config.is_object()) {
+ Logger::Log(
+ LogLevel::Error,
+ "Repository configuration file {} does not contain a map.",
+ (*cargs.repository_config).string());
+ std::exit(kExitFailure);
+ }
+ } catch (std::exception const& e) {
+ Logger::Log(LogLevel::Error,
+ "Parsing repository configuration file {} failed with "
+ "error:\n{}",
+ (*cargs.repository_config).string(),
+ e.what());
+ std::exit(kExitFailure);
+ }
+ }
+
+ std::string main_repo;
+
+ auto main_it = repo_config.find("main");
+ if (main_it != repo_config.end()) {
+ if (not main_it->is_string()) {
+ Logger::Log(LogLevel::Error,
+ "Repository config: main has to be a string");
+ std::exit(kExitFailure);
+ }
+ main_repo = *main_it;
+ }
+ if (cargs.main) {
+ main_repo = *cargs.main;
+ }
+
+ auto repos = nlohmann::json::object();
+ auto repos_it = repo_config.find("repositories");
+ if (repos_it != repo_config.end()) {
+ if (not repos_it->is_object()) {
+ Logger::Log(LogLevel::Error,
+ "Repository config: repositories has to be a map");
+ std::exit(kExitFailure);
+ }
+ repos = *repos_it;
+ }
+ if (not repos.contains(main_repo)) {
+ repos[main_repo] = nlohmann::json::object();
+ }
+
+ for (auto const& [repo, desc] : repos.items()) {
+ FileRoot ws_root{};
+
+ if (desc.contains("workspace_root")) {
+ auto [root, path] = ParseRoot(desc, repo, "workspace_root");
+ ws_root = std::move(root);
+ if (repo == main_repo) {
+ main_ws_root = std::move(path);
+ }
+ }
+ else if (repo == main_repo) {
+ main_ws_root = DetermineWorkspaceRoot(cargs);
+ ws_root = FileRoot{*main_ws_root};
+ }
+ else {
+ Logger::Log(
+ LogLevel::Error, "Unknown root for repository {}", repo);
+ std::exit(kExitFailure);
+ }
+ // TODO(aehlig): Handle root-naming scheme. So far, we assume ["file",
+ // dir] without checking.
+ auto info = RepositoryConfig::RepositoryInfo{std::move(ws_root)};
+ if (desc.contains("target_root")) {
+ info.target_root = ParseRoot(desc, repo, "target_root").first;
+ }
+ if (repo == main_repo && aargs.target_root) {
+ info.target_root = FileRoot{*aargs.target_root};
+ }
+ info.rule_root = info.target_root;
+ if (desc.contains("rule_root")) {
+ info.rule_root = ParseRoot(desc, repo, "rule_root").first;
+ }
+ if (repo == main_repo && aargs.rule_root) {
+ info.rule_root = FileRoot{*aargs.rule_root};
+ }
+ info.expression_root = info.rule_root;
+ if (desc.contains("expression_root")) {
+ info.expression_root =
+ ParseRoot(desc, repo, "expression_root").first;
+ }
+ if (repo == main_repo && aargs.expression_root) {
+ info.expression_root = FileRoot{*aargs.expression_root};
+ }
+
+ if (desc.contains("bindings")) {
+ if (not desc["bindings"].is_object()) {
+ Logger::Log(
+ LogLevel::Error,
+ "bindings has to be a string-string map, but found {}",
+ desc["bindings"].dump());
+ std::exit(kExitFailure);
+ }
+ for (auto const& [local_name, global_name] :
+ desc["bindings"].items()) {
+ if (not repos.contains(global_name)) {
+ Logger::Log(LogLevel::Error,
+ "Binding {} for {} in {} does not refer to a "
+ "defined repository.",
+ global_name,
+ local_name,
+ repo);
+ std::exit(kExitFailure);
+ }
+ info.name_mapping[local_name] = global_name;
+ }
+ }
+
+ if (desc.contains("target_file_name")) {
+ info.target_file_name = desc["target_file_name"];
+ }
+ if (repo == main_repo && aargs.target_file_name) {
+ info.target_file_name = *aargs.target_file_name;
+ }
+ if (desc.contains("rule_file_name")) {
+ info.rule_file_name = desc["rule_file_name"];
+ }
+ if (repo == main_repo && aargs.rule_file_name) {
+ info.rule_file_name = *aargs.rule_file_name;
+ }
+ if (desc.contains("expression_file_name")) {
+ info.expression_file_name = desc["expression_file_name"];
+ }
+ if (repo == main_repo && aargs.expression_file_name) {
+ info.expression_file_name = *aargs.expression_file_name;
+ }
+
+ RepositoryConfig::Instance().SetInfo(repo, std::move(info));
+ }
+
+ return {main_repo, main_ws_root};
+}
+
+struct AnalysisResult {
+ Target::ConfiguredTarget id;
+ AnalysedTargetPtr target;
+};
+
+[[nodiscard]] auto AnalyseTarget(
+ gsl::not_null<Target::ResultTargetMap*> const& result_map,
+ std::string const& main_repo,
+ std::optional<std::filesystem::path> const& main_ws_root,
+ std::size_t jobs,
+ AnalysisArguments const& clargs) -> std::optional<AnalysisResult> {
+ auto directory_entries = Base::CreateDirectoryEntriesMap(jobs);
+ auto expressions_file_map = Base::CreateExpressionFileMap(jobs);
+ auto rule_file_map = Base::CreateRuleFileMap(jobs);
+ auto targets_file_map = Base::CreateTargetsFileMap(jobs);
+ auto expr_map = Base::CreateExpressionMap(&expressions_file_map, jobs);
+ auto rule_map = Base::CreateRuleMap(&rule_file_map, &expr_map, jobs);
+ auto source_targets = Base::CreateSourceTargetMap(&directory_entries, jobs);
+ auto target_map = Target::CreateTargetMap(
+ &source_targets, &targets_file_map, &rule_map, result_map, jobs);
+
+ auto id = ReadConfiguredTarget(clargs, main_repo, main_ws_root);
+ std::shared_ptr<AnalysedTarget> target{};
+
+ bool failed{false};
+ {
+ TaskSystem ts{jobs};
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {id},
+ [&target](auto values) { target = *values[0]; },
+ [&failed](auto const& msg, bool fatal) {
+ Logger::Log(fatal ? LogLevel::Error : LogLevel::Warning,
+ "While processing targets:\n{}",
+ msg);
+ failed = failed or fatal;
+ });
+ }
+
+ if (failed) {
+ return std::nullopt;
+ }
+
+ if (not target) {
+ Logger::Log(
+ LogLevel::Error, "Failed to analyse target: {}", id.ToString());
+ if (not(DetectAndReportCycle("expression imports", expr_map) or
+ DetectAndReportCycle("target dependencies", target_map))) {
+ DetectAndReportPending("expressions", expr_map);
+ DetectAndReportPending("targets", expr_map);
+ }
+ return std::nullopt;
+ }
+
+ // Clean up in parallel what is no longer needed
+ {
+ TaskSystem ts{jobs};
+ target_map.Clear(&ts);
+ source_targets.Clear(&ts);
+ directory_entries.Clear(&ts);
+ expressions_file_map.Clear(&ts);
+ rule_file_map.Clear(&ts);
+ targets_file_map.Clear(&ts);
+ expr_map.Clear(&ts);
+ rule_map.Clear(&ts);
+ }
+
+ return AnalysisResult{id, target};
+}
+
+[[nodiscard]] auto ResultToJson(TargetResult const& result) -> nlohmann::json {
+ std::vector<std::string> artifacts{};
+ std::vector<std::string> runfiles{};
+ artifacts.reserve(result.artifact_stage->Map().size());
+ runfiles.reserve(result.runfiles->Map().size());
+ auto get_key = [](std::pair<std::string, ExpressionPtr> const& entry) {
+ return entry.first;
+ };
+ std::transform(result.artifact_stage->Map().begin(),
+ result.artifact_stage->Map().end(),
+ std::back_inserter(artifacts),
+ get_key);
+ std::transform(result.runfiles->Map().begin(),
+ result.runfiles->Map().end(),
+ std::back_inserter(runfiles),
+ get_key);
+ return nlohmann::ordered_json{
+ {"artifacts", artifacts},
+ {"runfiles", runfiles},
+ {"provides",
+ result.provides->ToJson(Expression::JsonMode::SerializeAllButNodes)}};
+}
+
+[[nodiscard]] auto TargetActionsToJson(AnalysedTargetPtr const& target)
+ -> nlohmann::json {
+ auto actions = nlohmann::json::array();
+ std::for_each(
+ target->Actions().begin(),
+ target->Actions().end(),
+ [&actions](auto const& action) { actions.push_back(action.ToJson()); });
+ return actions;
+}
+
+[[nodiscard]] auto TreesToJson(AnalysedTargetPtr const& target)
+ -> nlohmann::json {
+ auto trees = nlohmann::json::object();
+ std::for_each(
+ target->Trees().begin(),
+ target->Trees().end(),
+ [&trees](auto const& tree) { trees[tree.Id()] = tree.ToJson(); });
+
+ return trees;
+}
+
+void DumpActions(std::string const& file_path, AnalysisResult const& result) {
+ auto const dump_string =
+ IndentListsOnlyUntilDepth(TargetActionsToJson(result.target), 2, 1);
+ if (file_path == "-") {
+ Logger::Log(
+ LogLevel::Info, "Actions for target {}:", result.id.ToString());
+ std::cout << dump_string << std::endl;
+ }
+ else {
+ Logger::Log(LogLevel::Info,
+ "Dumping actions for target {} to file '{}'.",
+ result.id.ToString(),
+ file_path);
+ std::ofstream os(file_path);
+ os << dump_string << std::endl;
+ }
+}
+
+void DumpBlobs(std::string const& file_path, AnalysisResult const& result) {
+ auto blobs = nlohmann::json::array();
+ for (auto const& s : result.target->Blobs()) {
+ blobs.push_back(s);
+ }
+ auto const dump_string = blobs.dump(2);
+ if (file_path == "-") {
+ Logger::Log(
+ LogLevel::Info, "Blobs for target {}:", result.id.ToString());
+ std::cout << dump_string << std::endl;
+ }
+ else {
+ Logger::Log(LogLevel::Info,
+ "Dumping blobs for target {} to file '{}'.",
+ result.id.ToString(),
+ file_path);
+ std::ofstream os(file_path);
+ os << dump_string << std::endl;
+ }
+}
+
+void DumpTrees(std::string const& file_path, AnalysisResult const& result) {
+ auto const dump_string = TreesToJson(result.target).dump(2);
+ if (file_path == "-") {
+ Logger::Log(
+ LogLevel::Info, "Trees for target {}:", result.id.ToString());
+ std::cout << dump_string << std::endl;
+ }
+ else {
+ Logger::Log(LogLevel::Info,
+ "Dumping trees for target {} to file '{}'.",
+ result.id.ToString(),
+ file_path);
+ std::ofstream os(file_path);
+ os << dump_string << std::endl;
+ }
+}
+
+void DumpTargets(std::string const& file_path,
+ std::vector<Target::ConfiguredTarget> const& target_ids) {
+ auto repo_map = nlohmann::json::object();
+ auto conf_list =
+ [&repo_map](Base::EntityName const& ref) -> nlohmann::json& {
+ if (ref.IsAnonymousTarget()) {
+ auto& anon_map = repo_map[Base::EntityName::kAnonymousMarker];
+ auto& rule_map = anon_map[ref.anonymous->rule_map.ToIdentifier()];
+ return rule_map[ref.anonymous->target_node.ToIdentifier()];
+ }
+ auto& location_map = repo_map[Base::EntityName::kLocationMarker];
+ auto& module_map = location_map[ref.repository];
+ auto& target_map = module_map[ref.module];
+ return target_map[ref.name];
+ };
+ std::for_each(
+ target_ids.begin(), target_ids.end(), [&conf_list](auto const& id) {
+ conf_list(id.target).push_back(id.config.ToJson());
+ });
+ auto const dump_string = IndentListsOnlyUntilDepth(repo_map, 2);
+ if (file_path == "-") {
+ Logger::Log(LogLevel::Info, "List of analysed targets:");
+ std::cout << dump_string << std::endl;
+ }
+ else {
+ Logger::Log(LogLevel::Info,
+ "Dumping list of analysed targets to file '{}'.",
+ file_path);
+ std::ofstream os(file_path);
+ os << dump_string << std::endl;
+ }
+}
+
+auto DumpExpressionToMap(gsl::not_null<nlohmann::json*> const& map,
+ ExpressionPtr const& expr) -> bool {
+ auto const& id = expr->ToIdentifier();
+ if (not map->contains(id)) {
+ (*map)[id] = expr->ToJson();
+ return true;
+ }
+ return false;
+}
+
+void DumpNodesInExpressionToMap(gsl::not_null<nlohmann::json*> const& map,
+ ExpressionPtr const& expr) {
+ if (expr->IsNode()) {
+ if (DumpExpressionToMap(map, expr)) {
+ auto const& node = expr->Node();
+ if (node.IsAbstract()) {
+ DumpNodesInExpressionToMap(map,
+ node.GetAbstract().target_fields);
+ }
+ else if (node.IsValue()) {
+ DumpNodesInExpressionToMap(map, node.GetValue());
+ }
+ }
+ }
+ else if (expr->IsList()) {
+ for (auto const& entry : expr->List()) {
+ DumpNodesInExpressionToMap(map, entry);
+ }
+ }
+ else if (expr->IsMap()) {
+ for (auto const& [_, value] : expr->Map()) {
+ DumpNodesInExpressionToMap(map, value);
+ }
+ }
+ else if (expr->IsResult()) {
+ DumpNodesInExpressionToMap(map, expr->Result().provides);
+ }
+}
+
+void DumpAnonymous(std::string const& file_path,
+ std::vector<Target::ConfiguredTarget> const& target_ids) {
+ auto anon_map = nlohmann::json{{"nodes", nlohmann::json::object()},
+ {"rule_maps", nlohmann::json::object()}};
+ std::for_each(
+ target_ids.begin(), target_ids.end(), [&anon_map](auto const& id) {
+ if (id.target.IsAnonymousTarget()) {
+ DumpExpressionToMap(&anon_map["rule_maps"],
+ id.target.anonymous->rule_map);
+ DumpNodesInExpressionToMap(&anon_map["nodes"],
+ id.target.anonymous->target_node);
+ }
+ });
+ auto const dump_string = IndentListsOnlyUntilDepth(anon_map, 2);
+ if (file_path == "-") {
+ Logger::Log(LogLevel::Info, "List of anonymous target data:");
+ std::cout << dump_string << std::endl;
+ }
+ else {
+ Logger::Log(LogLevel::Info,
+ "Dumping list of anonymous target data to file '{}'.",
+ file_path);
+ std::ofstream os(file_path);
+ os << dump_string << std::endl;
+ }
+}
+
+void DumpNodes(std::string const& file_path, AnalysisResult const& result) {
+ auto node_map = nlohmann::json::object();
+ DumpNodesInExpressionToMap(&node_map, result.target->Provides());
+ auto const dump_string = IndentListsOnlyUntilDepth(node_map, 2);
+ if (file_path == "-") {
+ Logger::Log(
+ LogLevel::Info, "Target nodes of target {}:", result.id.ToString());
+ std::cout << dump_string << std::endl;
+ }
+ else {
+ Logger::Log(LogLevel::Info,
+ "Dumping target nodes of target {} to file '{}'.",
+ result.id.ToString(),
+ file_path);
+ std::ofstream os(file_path);
+ os << dump_string << std::endl;
+ }
+}
+
+[[nodiscard]] auto DiagnoseResults(AnalysisResult const& result,
+ Target::ResultTargetMap const& result_map,
+ DiagnosticArguments const& clargs) {
+ Logger::Log(LogLevel::Info,
+ "Result of target {}: {}",
+ result.id.ToString(),
+ ResultToJson(result.target->Result()).dump(2));
+ if (clargs.dump_actions) {
+ DumpActions(*clargs.dump_actions, result);
+ }
+ if (clargs.dump_blobs) {
+ DumpBlobs(*clargs.dump_blobs, result);
+ }
+ if (clargs.dump_trees) {
+ DumpTrees(*clargs.dump_trees, result);
+ }
+ if (clargs.dump_targets) {
+ DumpTargets(*clargs.dump_targets, result_map.ConfiguredTargets());
+ }
+ if (clargs.dump_anonymous) {
+ DumpAnonymous(*clargs.dump_anonymous, result_map.ConfiguredTargets());
+ }
+ if (clargs.dump_nodes) {
+ DumpNodes(*clargs.dump_nodes, result);
+ }
+}
+
+// Return disjoint maps for artifacts and runfiles
+[[nodiscard]] auto ReadOutputArtifacts(AnalysedTargetPtr const& target)
+ -> std::pair<std::map<std::string, ArtifactDescription>,
+ std::map<std::string, ArtifactDescription>> {
+ std::map<std::string, ArtifactDescription> artifacts{};
+ std::map<std::string, ArtifactDescription> runfiles{};
+ for (auto const& [path, artifact] : target->Artifacts()->Map()) {
+ artifacts.emplace(path, artifact->Artifact());
+ }
+ for (auto const& [path, artifact] : target->RunFiles()->Map()) {
+ if (not artifacts.contains(path)) {
+ runfiles.emplace(path, artifact->Artifact());
+ }
+ }
+ return {artifacts, runfiles};
+}
+
+void ReportTaintedness(const AnalysisResult& result) {
+ if (result.target->Tainted().empty()) {
+ // Never report untainted targets
+ return;
+ }
+
+ // To ensure proper quoting, go through json.
+ nlohmann::json tainted{};
+ for (auto const& s : result.target->Tainted()) {
+ tainted.push_back(s);
+ }
+ Logger::Log(LogLevel::Info, "Target tainted {}.", tainted.dump());
+}
+
+#ifndef BOOTSTRAP_BUILD_TOOL
+[[nodiscard]] auto FetchAndInstallArtifacts(
+ gsl::not_null<IExecutionApi*> const& api,
+ FetchArguments const& clargs) -> bool {
+ auto object_info = Artifact::ObjectInfo::FromString(clargs.object_id);
+ if (not object_info) {
+ Logger::Log(
+ LogLevel::Error, "failed to parse object id {}.", clargs.object_id);
+ return false;
+ }
+
+ if (clargs.output_path) {
+ auto output_path = (*clargs.output_path / "").parent_path();
+ if (FileSystemManager::IsDirectory(output_path)) {
+ output_path /= object_info->digest.hash();
+ }
+
+ if (not FileSystemManager::CreateDirectory(output_path.parent_path()) or
+ not api->RetrieveToPaths({*object_info}, {output_path})) {
+ Logger::Log(LogLevel::Error, "failed to retrieve artifact.");
+ return false;
+ }
+
+ Logger::Log(LogLevel::Info,
+ "artifact {} was installed to {}",
+ object_info->ToString(),
+ output_path.string());
+ }
+ else { // dump to stdout
+ if (not api->RetrieveToFds({*object_info}, {dup(fileno(stdout))})) {
+ Logger::Log(LogLevel::Error, "failed to dump artifact.");
+ return false;
+ }
+ }
+
+ return true;
+}
+#endif
+
+void PrintDoc(const nlohmann::json& doc, const std::string& indent) {
+ if (not doc.is_array()) {
+ return;
+ }
+ for (auto const& line : doc) {
+ if (line.is_string()) {
+ std::cout << indent << line.get<std::string>() << "\n";
+ }
+ }
+}
+
+void PrintFields(nlohmann::json const& fields,
+ const nlohmann::json& fdoc,
+ const std::string& indent_field,
+ const std::string& indent_field_doc) {
+ for (auto const& f : fields) {
+ std::cout << indent_field << f << "\n";
+ auto doc = fdoc.find(f);
+ if (doc != fdoc.end()) {
+ PrintDoc(*doc, indent_field_doc);
+ }
+ }
+}
+
+auto DescribeTarget(std::string const& main_repo,
+ std::optional<std::filesystem::path> const& main_ws_root,
+ std::size_t jobs,
+ AnalysisArguments const& clargs) -> int {
+ auto id = ReadConfiguredTarget(clargs, main_repo, main_ws_root);
+ if (id.target.explicit_file_reference) {
+ std::cout << id.ToString() << " is a source file." << std::endl;
+ return kExitSuccess;
+ }
+ auto targets_file_map = Base::CreateTargetsFileMap(jobs);
+ nlohmann::json targets_file{};
+ bool failed{false};
+ {
+ TaskSystem ts{jobs};
+ targets_file_map.ConsumeAfterKeysReady(
+ &ts,
+ {id.target.ToModule()},
+ [&targets_file](auto values) { targets_file = *values[0]; },
+ [&failed](auto const& msg, bool fatal) {
+ Logger::Log(fatal ? LogLevel::Error : LogLevel::Warning,
+ "While searching for target description:\n{}",
+ msg);
+ failed = failed or fatal;
+ });
+ }
+ if (failed) {
+ return kExitFailure;
+ }
+ auto desc_it = targets_file.find(id.target.name);
+ if (desc_it == targets_file.end()) {
+ std::cout << id.ToString() << " is implicitly a source file."
+ << std::endl;
+ return kExitSuccess;
+ }
+ nlohmann::json desc = *desc_it;
+ auto rule_it = desc.find("type");
+ if (rule_it == desc.end()) {
+ Logger::Log(LogLevel::Error,
+ "{} is a target without specified type.",
+ id.ToString());
+ return kExitFailure;
+ }
+ if (BuildMaps::Target::IsBuiltInRule(*rule_it)) {
+ std::cout << id.ToString() << " is defined by built-in rule "
+ << rule_it->dump() << "." << std::endl;
+ if (*rule_it == "export") {
+ // export targets may have doc fields of their own.
+ auto doc = desc.find("doc");
+ if (doc != desc.end()) {
+ PrintDoc(*doc, " | ");
+ }
+ auto config_doc = nlohmann::json::object();
+ auto config_doc_it = desc.find("config_doc");
+ if (config_doc_it != desc.end() and config_doc_it->is_object()) {
+ config_doc = *config_doc_it;
+ }
+ auto flexible_config = desc.find("flexible_config");
+ if (flexible_config != desc.end() and
+ (not flexible_config->empty())) {
+ std::cout << " Flexible configuration variables\n";
+ PrintFields(*flexible_config, config_doc, " - ", " | ");
+ }
+ }
+ return kExitSuccess;
+ }
+ auto rule_name = BuildMaps::Base::ParseEntityNameFromJson(
+ *rule_it, id.target, [&rule_it, &id](std::string const& parse_err) {
+ Logger::Log(LogLevel::Error,
+ "Parsing rule name {} for target {} failed with:\n{}.",
+ rule_it->dump(),
+ id.ToString(),
+ parse_err);
+ });
+ if (not rule_name) {
+ return kExitFailure;
+ }
+ auto rule_file_map = Base::CreateRuleFileMap(jobs);
+ nlohmann::json rules_file;
+ {
+ TaskSystem ts{jobs};
+ rule_file_map.ConsumeAfterKeysReady(
+ &ts,
+ {rule_name->ToModule()},
+ [&rules_file](auto values) { rules_file = *values[0]; },
+ [&failed](auto const& msg, bool fatal) {
+ Logger::Log(fatal ? LogLevel::Error : LogLevel::Warning,
+ "While searching for rule definition:\n{}",
+ msg);
+ failed = failed or fatal;
+ });
+ }
+ if (failed) {
+ return kExitFailure;
+ }
+ auto ruledesc_it = rules_file.find(rule_name->name);
+ if (ruledesc_it == rules_file.end()) {
+ Logger::Log(LogLevel::Error,
+ "Rule definition of {} is missing",
+ rule_name->ToString());
+ return kExitFailure;
+ }
+ std::cout << id.ToString() << " is defined by user-defined rule "
+ << rule_name->ToString() << ".\n\n";
+ auto const& rdesc = *ruledesc_it;
+ auto doc = rdesc.find("doc");
+ if (doc != rdesc.end()) {
+ PrintDoc(*doc, " | ");
+ }
+ auto field_doc = nlohmann::json::object();
+ auto field_doc_it = rdesc.find("field_doc");
+ if (field_doc_it != rdesc.end() and field_doc_it->is_object()) {
+ field_doc = *field_doc_it;
+ }
+ auto string_fields = rdesc.find("string_fields");
+ if (string_fields != rdesc.end() and (not string_fields->empty())) {
+ std::cout << " String fields\n";
+ PrintFields(*string_fields, field_doc, " - ", " | ");
+ }
+ auto target_fields = rdesc.find("target_fields");
+ if (target_fields != rdesc.end() and (not target_fields->empty())) {
+ std::cout << " Target fields\n";
+ PrintFields(*target_fields, field_doc, " - ", " | ");
+ }
+ auto config_doc = nlohmann::json::object();
+ auto config_doc_it = rdesc.find("config_doc");
+ if (config_doc_it != rdesc.end() and config_doc_it->is_object()) {
+ config_doc = *config_doc_it;
+ }
+ auto config_vars = rdesc.find("config_vars");
+ if (config_vars != rdesc.end() and (not config_vars->empty())) {
+ std::cout << " Variables taken from the configuration\n";
+ PrintFields(*config_vars, config_doc, " - ", " | ");
+ }
+ std::cout << std::endl;
+ return kExitSuccess;
+}
+
+void DumpArtifactsToBuild(
+ std::map<std::string, ArtifactDescription> const& artifacts,
+ std::map<std::string, ArtifactDescription> const& runfiles,
+ const std::filesystem::path& file_path) {
+ nlohmann::json to_build{};
+ for (auto const& [path, artifact] : runfiles) {
+ to_build[path] = artifact.ToJson();
+ }
+ for (auto const& [path, artifact] : artifacts) {
+ to_build[path] = artifact.ToJson();
+ }
+ auto const dump_string = IndentListsOnlyUntilDepth(to_build, 2, 1);
+ std::ofstream os(file_path);
+ os << dump_string << std::endl;
+}
+
+} // namespace
+
+auto main(int argc, char* argv[]) -> int {
+ try {
+ auto arguments = ParseCommandLineArguments(argc, argv);
+
+ SetupLogging(arguments.common);
+#ifndef BOOTSTRAP_BUILD_TOOL
+ SetupLocalExecution(arguments.endpoint, arguments.build);
+#endif
+
+ auto jobs = arguments.build.build_jobs > 0 ? arguments.build.build_jobs
+ : arguments.common.jobs;
+
+ auto stage_args = arguments.cmd == SubCommand::kInstall or
+ arguments.cmd == SubCommand::kInstallCas or
+ arguments.cmd == SubCommand::kTraverse
+ ? std::make_optional(std::move(arguments.stage))
+ : std::nullopt;
+
+ auto rebuild_args =
+ arguments.cmd == SubCommand::kRebuild
+ ? std::make_optional(std::move(arguments.rebuild))
+ : std::nullopt;
+
+#ifndef BOOTSTRAP_BUILD_TOOL
+ GraphTraverser const traverser{{jobs,
+ std::move(arguments.endpoint),
+ std::move(arguments.build),
+ std::move(stage_args),
+ std::move(rebuild_args)}};
+
+ if (arguments.cmd == SubCommand::kInstallCas) {
+ return FetchAndInstallArtifacts(traverser.ExecutionApi(),
+ arguments.fetch)
+ ? kExitSuccess
+ : kExitFailure;
+ }
+#endif
+
+ auto [main_repo, main_ws_root] =
+ DetermineRoots(arguments.common, arguments.analysis);
+
+#ifndef BOOTSTRAP_BUILD_TOOL
+ if (arguments.cmd == SubCommand::kTraverse) {
+ if (arguments.graph.git_cas) {
+ if (not RepositoryConfig::Instance().SetGitCAS(
+ *arguments.graph.git_cas)) {
+ Logger::Log(LogLevel::Warning,
+ "Failed set Git CAS {}.",
+ arguments.graph.git_cas->string());
+ }
+ }
+ if (traverser.BuildAndStage(arguments.graph.graph_file,
+ arguments.graph.artifacts)) {
+ return kExitSuccess;
+ }
+ }
+ else if (arguments.cmd == SubCommand::kDescribe) {
+ return DescribeTarget(main_repo,
+ main_ws_root,
+ arguments.common.jobs,
+ arguments.analysis);
+ }
+
+ else {
+#endif
+ BuildMaps::Target::ResultTargetMap result_map{
+ arguments.common.jobs};
+ auto result = AnalyseTarget(&result_map,
+ main_repo,
+ main_ws_root,
+ arguments.common.jobs,
+ arguments.analysis);
+ if (result) {
+ if (arguments.analysis.graph_file) {
+ result_map.ToFile(*arguments.analysis.graph_file);
+ }
+ auto const [artifacts, runfiles] =
+ ReadOutputArtifacts(result->target);
+ if (arguments.analysis.artifacts_to_build_file) {
+ DumpArtifactsToBuild(
+ artifacts,
+ runfiles,
+ *arguments.analysis.artifacts_to_build_file);
+ }
+ if (arguments.cmd == SubCommand::kAnalyse) {
+ DiagnoseResults(*result, result_map, arguments.diagnose);
+ ReportTaintedness(*result);
+ // Clean up in parallel
+ {
+ TaskSystem ts;
+ result_map.Clear(&ts);
+ }
+ return kExitSuccess;
+ }
+#ifndef BOOTSTRAP_BUILD_TOOL
+ auto const& [actions, blobs, trees] = result_map.ToResult();
+
+ // Clean up result map, now that it is no longer needed
+ {
+ TaskSystem ts;
+ result_map.Clear(&ts);
+ }
+
+ Logger::Log(
+ LogLevel::Info,
+ "{}ing {}.",
+ arguments.cmd == SubCommand::kRebuild ? "Rebuild" : "Build",
+ result->id.ToString());
+ ReportTaintedness(*result);
+
+ auto build_result = traverser.BuildAndStage(
+ artifacts, runfiles, actions, blobs, trees);
+ if (build_result) {
+ // Repeat taintedness message to make the user aware that
+ // the artifacts are not for production use.
+ ReportTaintedness(*result);
+ return build_result->second ? kExitSuccessFailedArtifacts
+ : kExitSuccess;
+ }
+ }
+#endif
+ }
+ } catch (std::exception const& ex) {
+ Logger::Log(
+ LogLevel::Error, "Caught exception with message: {}", ex.what());
+ }
+ return kExitFailure;
+}
diff --git a/src/buildtool/main/main.hpp b/src/buildtool/main/main.hpp
new file mode 100644
index 00000000..6212d86e
--- /dev/null
+++ b/src/buildtool/main/main.hpp
@@ -0,0 +1,10 @@
+#ifndef INCLUDED_SRC_BUILDOOL_MAIN_MAIN_HPP
+#define INCLUDED_SRC_BUILDOOL_MAIN_MAIN_HPP
+
+enum ExitCodes {
+ kExitSuccess = 0,
+ kExitFailure = 1,
+ kExitSuccessFailedArtifacts = 2
+};
+
+#endif
diff --git a/src/buildtool/multithreading/TARGETS b/src/buildtool/multithreading/TARGETS
new file mode 100644
index 00000000..f14d42bb
--- /dev/null
+++ b/src/buildtool/multithreading/TARGETS
@@ -0,0 +1,54 @@
+{ "task":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["task"]
+ , "hdrs": ["task.hpp"]
+ , "stage": ["src", "buildtool", "multithreading"]
+ }
+, "notification_queue":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["notification_queue"]
+ , "hdrs": ["notification_queue.hpp"]
+ , "deps": ["task", ["src/utils/cpp", "atomic"]]
+ , "stage": ["src", "buildtool", "multithreading"]
+ }
+, "task_system":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["task_system"]
+ , "hdrs": ["task_system.hpp"]
+ , "srcs": ["task_system.cpp"]
+ , "deps": ["notification_queue", "task", ["@", "gsl-lite", "", "gsl-lite"]]
+ , "stage": ["src", "buildtool", "multithreading"]
+ }
+, "async_map_node":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["async_map_node"]
+ , "hdrs": ["async_map_node.hpp"]
+ , "deps": ["task", "task_system", ["@", "gsl-lite", "", "gsl-lite"]]
+ , "stage": ["src", "buildtool", "multithreading"]
+ }
+, "async_map":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["async_map"]
+ , "hdrs": ["async_map.hpp"]
+ , "deps":
+ [ "task"
+ , "task_system"
+ , "async_map_node"
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ ]
+ , "stage": ["src", "buildtool", "multithreading"]
+ }
+, "async_map_consumer":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["async_map_consumer"]
+ , "hdrs": ["async_map_consumer.hpp"]
+ , "deps":
+ [ "task"
+ , "task_system"
+ , "async_map_node"
+ , "async_map"
+ , ["@", "gsl-lite", "", "gsl-lite"]
+ ]
+ , "stage": ["src", "buildtool", "multithreading"]
+ }
+} \ No newline at end of file
diff --git a/src/buildtool/multithreading/async_map.hpp b/src/buildtool/multithreading/async_map.hpp
new file mode 100644
index 00000000..80d5b0a3
--- /dev/null
+++ b/src/buildtool/multithreading/async_map.hpp
@@ -0,0 +1,109 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_MULTITHREADING_ASYNC_MAP_HPP
+#define INCLUDED_SRC_BUILDTOOL_MULTITHREADING_ASYNC_MAP_HPP
+
+#include <memory>
+#include <mutex> // unique_lock
+#include <shared_mutex>
+#include <thread>
+#include <unordered_map>
+#include <utility> // std::make_pair to use std::unordered_map's emplace()
+#include <vector>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/multithreading/async_map_node.hpp"
+#include "src/buildtool/multithreading/task.hpp"
+#include "src/buildtool/multithreading/task_system.hpp"
+
+// Wrapper around map data structure for KeyT->AsyncMapNode<ValueT> that only
+// exposes the possibility to retrieve the node for a certain key, adding it in
+// case of the key not yet being present. Thread-safe. Map look-ups happen under
+// a shared lock, and only in the case that key needs to be added to the
+// underlying map we uniquely lock. This is the default map class used inside
+// AsyncMapConsumer
+template <typename KeyT, typename ValueT>
+class AsyncMap {
+ public:
+ using Node = AsyncMapNode<KeyT, ValueT>;
+ // Nodes will be passed onto tasks. Nodes are owned by this map. Nodes are
+ // alive as long as this map lives.
+ using NodePtr = Node*;
+
+ explicit AsyncMap(std::size_t jobs) : width_{ComputeWidth(jobs)} {}
+
+ AsyncMap() = default;
+
+ /// \brief Retrieve node for certain key. Key and new node are emplaced in
+ /// the map in case that the key does not exist already.
+ /// \returns shared pointer to the Node associated to given key
+ [[nodiscard]] auto GetOrCreateNode(KeyT const& key) -> NodePtr {
+ auto* node_or_null = GetNodeOrNullFromSharedMap(key);
+ return node_or_null != nullptr ? node_or_null : AddKey(key);
+ }
+
+ [[nodiscard]] auto GetPendingKeys() const -> std::vector<KeyT> {
+ std::vector<KeyT> keys{};
+ size_t s = 0;
+ for (auto& i : map_) {
+ s += i.size();
+ }
+
+ keys.reserve(s);
+ for (auto& i : map_) {
+ for (auto const& [key, node] : i) {
+ if (not node->IsReady()) {
+ keys.emplace_back(key);
+ }
+ }
+ }
+ return keys;
+ }
+
+ void Clear(gsl::not_null<TaskSystem*> const& ts) {
+ for (std::size_t i = 0; i < width_; ++i) {
+ ts->QueueTask([i, this]() { map_[i].clear(); });
+ }
+ }
+
+ private:
+ constexpr static std::size_t kScalingFactor = 2;
+ std::size_t width_{ComputeWidth(0)};
+ std::vector<std::shared_mutex> m_{width_};
+ std::vector<std::unordered_map<KeyT, std::unique_ptr<Node>>> map_{width_};
+
+ constexpr static auto ComputeWidth(std::size_t jobs) -> std::size_t {
+ if (jobs <= 0) {
+ // Non-positive indicates to use the default value
+ return ComputeWidth(
+ std::max(1U, std::thread::hardware_concurrency()));
+ }
+ return jobs * kScalingFactor + 1;
+ }
+
+ [[nodiscard]] auto GetNodeOrNullFromSharedMap(KeyT const& key) -> NodePtr {
+ auto part = std::hash<KeyT>{}(key) % width_;
+ std::shared_lock sl{m_[part]};
+ auto it_to_key_pair = map_[part].find(key);
+ if (it_to_key_pair != map_[part].end()) {
+ // we know if the key is in the map then
+ // the pair {key, node} is read only
+ return it_to_key_pair->second.get();
+ }
+ return nullptr;
+ }
+
+ [[nodiscard]] auto AddKey(KeyT const& key) -> NodePtr {
+ auto part = std::hash<KeyT>{}(key) % width_;
+ std::unique_lock ul{m_[part]};
+ auto it_to_key_pair = map_[part].find(key);
+ if (it_to_key_pair != map_[part].end()) {
+ return it_to_key_pair->second.get();
+ }
+ auto new_node = std::make_unique<Node>(key);
+ bool unused{};
+ std::tie(it_to_key_pair, unused) =
+ map_[part].emplace(std::make_pair(key, std::move(new_node)));
+ return it_to_key_pair->second.get();
+ }
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_MULTITHREADING_ASYNC_MAP_HPP
diff --git a/src/buildtool/multithreading/async_map_consumer.hpp b/src/buildtool/multithreading/async_map_consumer.hpp
new file mode 100644
index 00000000..eb965d4e
--- /dev/null
+++ b/src/buildtool/multithreading/async_map_consumer.hpp
@@ -0,0 +1,331 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_MULTITHREADING_ASYNC_MAP_CONSUMER_HPP
+#define INCLUDED_SRC_BUILDTOOL_MULTITHREADING_ASYNC_MAP_CONSUMER_HPP
+
+#include <atomic>
+#include <condition_variable>
+#include <functional>
+#include <mutex>
+#include <shared_mutex>
+#include <thread>
+#include <unordered_map>
+#include <unordered_set>
+#include <vector>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/multithreading/async_map.hpp"
+#include "src/buildtool/multithreading/async_map_node.hpp"
+#include "src/buildtool/multithreading/task.hpp"
+#include "src/buildtool/multithreading/task_system.hpp"
+
+using AsyncMapConsumerLogger = std::function<void(std::string const&, bool)>;
+using AsyncMapConsumerLoggerPtr = std::shared_ptr<AsyncMapConsumerLogger>;
+
+// Thread safe class that enables us to add tasks to the queue system that
+// depend on values being ready. Value constructors are only queued once per key
+// and tasks that depend on such values are only queued once the values are
+// ready. As template parameters, it takes the type that keys will have, the
+// type that their corresponding values will have and the type of the underlying
+// thread-safe associative container. The default thread-safe associative
+// container is AsyncMap<Key, Value> and any substite must have the same public
+// interface to be used in AsyncMapConsumer.
+template <typename Key, typename Value, typename Map = AsyncMap<Key, Value>>
+class AsyncMapConsumer {
+ public:
+ using Node = typename Map::Node;
+ using NodePtr = typename Map::NodePtr;
+
+ using Setter = std::function<void(Value&&)>;
+ using SetterPtr = std::shared_ptr<Setter>;
+
+ using Logger = AsyncMapConsumerLogger;
+ using LoggerPtr = AsyncMapConsumerLoggerPtr;
+
+ using FailureFunction = std::function<void()>;
+ using FailureFunctionPtr = std::shared_ptr<FailureFunction>;
+
+ using Consumer = std::function<void(std::vector<Value const*> const&)>;
+ using ConsumerPtr = std::shared_ptr<Consumer>;
+
+ using SubCaller =
+ std::function<void(std::vector<Key> const&, Consumer, LoggerPtr)>;
+ using SubCallerPtr = std::shared_ptr<SubCaller>;
+
+ using ValueCreator = std::function<void(gsl::not_null<TaskSystem*> const&,
+ SetterPtr,
+ LoggerPtr,
+ SubCallerPtr,
+ Key const&)>;
+
+ explicit AsyncMapConsumer(ValueCreator vc, std::size_t jobs = 0)
+ : value_creator_{std::make_shared<ValueCreator>(std::move(vc))},
+ map_{jobs} {}
+
+ /// \brief Makes sure that the consumer will be executed once the values for
+ /// all the keys are available, and that the value creators for those keys
+ /// are queued (if they weren't queued already).
+ /// \param[in] ts task system
+ /// \param[in] keys keys for the values that consumer requires
+ /// \param[in] consumer function-like object that takes a vector of values
+ /// and returns void that will be queued to be called with the values
+ /// associated to keys once they are ready
+ /// \param[in] logger function-like object that takes a string and a bool
+ /// indicating that the event was fatal and returns
+ /// void. This will be passed around and can be used to report errors
+ /// (possibly with side effects outside AsyncMapConsumer) in the value
+ /// creator
+ /// \param[in] fail function to call instead of the consumer if the
+ /// creation of this node failed
+ void ConsumeAfterKeysReady(gsl::not_null<TaskSystem*> const& ts,
+ std::vector<Key> const& keys,
+ Consumer&& consumer,
+ Logger&& logger,
+ FailureFunction&& fail) {
+ ConsumeAfterKeysReady(
+ ts,
+ std::nullopt,
+ keys,
+ std::move(consumer),
+ std::make_shared<Logger>(std::move(logger)),
+ std::make_shared<FailureFunction>(std::move(fail)));
+ }
+
+ // Similar to the previous method, but without failure function
+ void ConsumeAfterKeysReady(gsl::not_null<TaskSystem*> const& ts,
+ std::vector<Key> const& keys,
+ Consumer&& consumer,
+ Logger&& logger) {
+ ConsumeAfterKeysReady(ts,
+ std::nullopt,
+ keys,
+ std::move(consumer),
+ std::make_shared<Logger>(std::move(logger)),
+ nullptr);
+ }
+
+ [[nodiscard]] auto GetPendingKeys() const -> std::vector<Key> {
+ return map_.GetPendingKeys();
+ }
+
+ // Returns call order of the first cycle found in the requests map.
+ [[nodiscard]] auto DetectCycle() const -> std::optional<std::vector<Key>> {
+ auto const& requests = GetPendingRequests();
+ std::vector<Key> calls{};
+ std::unordered_set<Key> known{};
+ calls.resize(requests.size() + 1, Key{});
+ known.reserve(requests.size());
+ for (auto const& [caller, _] : requests) {
+ if (DetectCycleForCaller(&calls, &known, requests, caller)) {
+ return calls;
+ }
+ }
+ return std::nullopt;
+ }
+
+ void Clear(gsl::not_null<TaskSystem*> const& ts) { map_.Clear(ts); }
+
+ private:
+ using NodeRequests = std::unordered_map<Key, std::unordered_set<NodePtr>>;
+
+ std::shared_ptr<ValueCreator> value_creator_{};
+ Map map_{};
+ mutable std::shared_mutex requests_m_{};
+ std::unordered_map<std::thread::id, NodeRequests> requests_by_thread_{};
+
+ // Similar to previous methods, but in this case the logger and failure
+ // function are already std::shared_ptr type.
+ void ConsumeAfterKeysReady(gsl::not_null<TaskSystem*> const& ts,
+ std::optional<Key> const& consumer_id,
+ std::vector<Key> const& keys,
+ Consumer&& consumer,
+ LoggerPtr&& logger,
+ FailureFunctionPtr&& fail) {
+ auto consumerptr = std::make_shared<Consumer>(std::move(consumer));
+ if (keys.empty()) {
+ ts->QueueTask([consumerptr = std::move(consumerptr)]() {
+ (*consumerptr)({});
+ });
+ return;
+ }
+
+ auto nodes = EnsureValuesEventuallyPresent(ts, keys, std::move(logger));
+ auto first_node = nodes->at(0);
+ if (fail) {
+ first_node->QueueOnFailure(ts, [fail]() { (*fail)(); });
+ }
+ auto const queued = first_node->AddOrQueueAwaitingTask(
+ ts,
+ [ts,
+ consumerptr,
+ nodes = std::move(nodes),
+ fail,
+ this,
+ consumer_id]() {
+ QueueTaskWhenAllReady(
+ ts, consumer_id, consumerptr, fail, nodes, 1);
+ });
+ if (consumer_id and not queued) {
+ RecordNodeRequest(*consumer_id, first_node);
+ }
+ }
+
+ [[nodiscard]] auto EnsureValuesEventuallyPresent(
+ gsl::not_null<TaskSystem*> const& ts,
+ std::vector<Key> const& keys,
+ LoggerPtr&& logger) -> std::shared_ptr<std::vector<NodePtr>> {
+ std::vector<NodePtr> nodes{};
+ nodes.reserve(keys.size());
+ std::transform(std::begin(keys),
+ std::end(keys),
+ std::back_inserter(nodes),
+ [this, ts, logger](Key const& key) {
+ return EnsureValuePresent(ts, key, logger);
+ });
+ return std::make_shared<std::vector<NodePtr>>(std::move(nodes));
+ }
+
+ // Retrieves node from map associated to given key and queues its processing
+ // task (i.e. a task that executes the value creator) to the task system.
+ // Note that the node will only queue a processing task once.
+ [[nodiscard]] auto EnsureValuePresent(gsl::not_null<TaskSystem*> const& ts,
+ Key const& key,
+ LoggerPtr const& logger) -> NodePtr {
+ auto node = map_.GetOrCreateNode(key);
+ auto setterptr = std::make_shared<Setter>([ts, node](Value&& value) {
+ node->SetAndQueueAwaitingTasks(ts, std::move(value));
+ });
+ auto failptr =
+ std::make_shared<FailureFunction>([node, ts]() { node->Fail(ts); });
+ auto subcallerptr = std::make_shared<SubCaller>(
+ [ts, failptr = std::move(failptr), this, key](
+ std::vector<Key> const& keys,
+ Consumer&& consumer,
+ LoggerPtr&& logger) {
+ ConsumeAfterKeysReady(ts,
+ key,
+ keys,
+ std::move(consumer),
+ std::move(logger),
+ FailureFunctionPtr{failptr});
+ });
+ auto wrappedLogger =
+ std::make_shared<Logger>([logger, node, ts](auto msg, auto fatal) {
+ if (fatal) {
+ node->Fail(ts);
+ }
+ (*logger)(msg, fatal);
+ });
+ node->QueueOnceProcessingTask(
+ ts,
+ [vc = value_creator_,
+ ts,
+ key,
+ setterptr = std::move(setterptr),
+ wrappedLogger = std::move(wrappedLogger),
+ subcallerptr = std::move(subcallerptr)]() {
+ (*vc)(ts, setterptr, wrappedLogger, subcallerptr, key);
+ });
+ return node;
+ }
+
+ // Queues tasks for each node making sure that the task that calls the
+ // consumer on the values is only queued once all the values are ready
+ void QueueTaskWhenAllReady(
+ gsl::not_null<TaskSystem*> const& ts,
+ std::optional<Key> const& consumer_id,
+ ConsumerPtr const& consumer,
+ // NOLINTNEXTLINE(performance-unnecessary-value-param)
+ FailureFunctionPtr const& fail,
+ std::shared_ptr<std::vector<NodePtr>> const& nodes,
+ std::size_t pos) {
+ if (pos == nodes->size()) {
+ ts->QueueTask([nodes, consumer]() {
+ std::vector<Value const*> values{};
+ values.reserve(nodes->size());
+ std::transform(
+ nodes->begin(),
+ nodes->end(),
+ std::back_inserter(values),
+ [](NodePtr const& node) { return &node->GetValue(); });
+ (*consumer)(values);
+ });
+ }
+ else {
+ auto current = nodes->at(pos);
+ if (fail) {
+ current->QueueOnFailure(ts, [fail]() { (*fail)(); });
+ }
+ auto const queued = current->AddOrQueueAwaitingTask(
+ ts, [ts, consumer, fail, nodes, pos, this, consumer_id]() {
+ QueueTaskWhenAllReady(
+ ts, consumer_id, consumer, fail, nodes, pos + 1);
+ });
+ if (consumer_id and not queued) {
+ RecordNodeRequest(*consumer_id, current);
+ }
+ }
+ }
+
+ void RecordNodeRequest(Key const& consumer_id,
+ gsl::not_null<NodePtr> const& node) {
+ auto tid = std::this_thread::get_id();
+ std::shared_lock shared(requests_m_);
+ auto local_requests_it = requests_by_thread_.find(tid);
+ if (local_requests_it == requests_by_thread_.end()) {
+ shared.unlock();
+ std::unique_lock lock(requests_m_);
+ // create new requests map for thread
+ requests_by_thread_[tid] = NodeRequests{{consumer_id, {node}}};
+ return;
+ }
+ // every thread writes to separate local requests map
+ local_requests_it->second[consumer_id].emplace(node);
+ }
+
+ [[nodiscard]] auto GetPendingRequests() const -> NodeRequests {
+ NodeRequests requests{};
+ std::unique_lock lock(requests_m_);
+ for (auto const& [_, local_requests] : requests_by_thread_) {
+ requests.reserve(requests.size() + local_requests.size());
+ for (auto const& [consumer, deps] : local_requests) {
+ auto& nodes = requests[consumer];
+ std::copy_if( // filter out nodes that are ready by now
+ deps.begin(),
+ deps.end(),
+ std::inserter(nodes, nodes.end()),
+ [](auto const& node) { return not node->IsReady(); });
+ }
+ }
+ return requests;
+ }
+
+ [[nodiscard]] static auto DetectCycleForCaller(
+ gsl::not_null<std::vector<Key>*> const& calls,
+ gsl::not_null<std::unordered_set<Key>*> const& known,
+ NodeRequests const& requests,
+ Key const& caller,
+ std::size_t pos = 0) -> bool {
+ if (not known->contains(caller)) {
+ auto it = requests.find(caller);
+ if (it != requests.end()) {
+ (*calls)[pos++] = caller;
+ for (auto const& dep : it->second) {
+ auto const& dep_key = dep->GetKey();
+ auto last = calls->begin() + static_cast<int>(pos);
+ if (std::find(calls->begin(), last, dep_key) != last) {
+ (*calls)[pos++] = dep_key;
+ calls->resize(pos);
+ return true;
+ }
+ if (DetectCycleForCaller(
+ calls, known, requests, dep_key, pos)) {
+ return true;
+ }
+ }
+ }
+ known->emplace(caller);
+ }
+ return false;
+ }
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_MULTITHREADING_ASYNC_MAP_CONSUMER_HPP
diff --git a/src/buildtool/multithreading/async_map_node.hpp b/src/buildtool/multithreading/async_map_node.hpp
new file mode 100644
index 00000000..31a33512
--- /dev/null
+++ b/src/buildtool/multithreading/async_map_node.hpp
@@ -0,0 +1,173 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_MULTITHREADING_ASYNC_MAP_NODE_HPP
+#define INCLUDED_SRC_BUILDTOOL_MULTITHREADING_ASYNC_MAP_NODE_HPP
+
+#include <atomic>
+#include <mutex>
+#include <optional>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/multithreading/task.hpp"
+#include "src/buildtool/multithreading/task_system.hpp"
+
+// Wrapper around Value to enable async access to it in a continuation-style
+// programming way
+template <typename Key, typename Value>
+class AsyncMapNode {
+ public:
+ explicit AsyncMapNode(Key key) : key_{std::move(key)} {}
+
+ /// \brief Set value and queue awaiting tasks to the task system under a
+ /// unique lock. Awaiting tasks are cleared to ensure node does not hold
+ /// (shared) ownership of any data related to the task once they are given
+ /// to the task system
+ /// \param[in] ts task system to which tasks will be queued
+ /// \param[in] value value to set
+ void SetAndQueueAwaitingTasks(gsl::not_null<TaskSystem*> const& ts,
+ Value&& value) {
+ std::unique_lock lock{m_};
+ if (failed_) {
+ // The node is failed already; no value can be set.
+ return;
+ }
+ value_ = std::move(value);
+ for (auto& task : awaiting_tasks_) {
+ ts->QueueTask(std::move(task));
+ }
+ // After tasks are queued we need to release them and any other
+ // information we are keeping about the tasks
+ awaiting_tasks_.clear();
+ failure_tasks_.clear();
+ }
+
+ /// \brief If node is not marked as queued to be processed, task is queued
+ /// to the task system. A task to process the node (that is, set its value)
+ /// can only be queued once. Lock free
+ /// \param[in] ts task system
+ /// \param[in] task processing task. Function type must have
+ /// operator()()
+ template <typename Function>
+ void QueueOnceProcessingTask(gsl::not_null<TaskSystem*> const& ts,
+ Function&& task) {
+ // if node was already queued to be processed, nothing to do
+ if (GetAndMarkQueuedToBeProcessed()) {
+ return;
+ }
+ ts->QueueTask(std::forward<Function>(task));
+ }
+
+ /// \brief Ensure task will be queued to the task system once the value of
+ /// the node is ready. This operation is lock free once the value is ready
+ /// before that node is uniquely locked while task is being added to
+ /// awaiting tasks
+ /// \param[in] ts task system
+ /// \param[in] task task awaiting for value. Function type must have
+ /// operator()()
+ /// \returns boolean indicating whether task was immediately queued.
+ template <typename Function>
+ [[nodiscard]] auto AddOrQueueAwaitingTask(
+ gsl::not_null<TaskSystem*> const& ts,
+ Function&& task) -> bool {
+ if (IsReady()) {
+ ts->QueueTask(std::forward<Function>(task));
+ return true;
+ }
+ {
+ std::unique_lock ul{m_};
+ if (failed_) {
+ // If the node is failed (and hence will never get ready), do
+ // not queue any more tasks.
+ return false;
+ }
+ // Check again in case the node was made ready after the lock-free
+ // check by another thread
+ if (IsReady()) {
+ ts->QueueTask(std::forward<Function>(task));
+ return true;
+ }
+ awaiting_tasks_.emplace_back(std::forward<Function>(task));
+ return false;
+ }
+ }
+
+ /// \brief Ensure task will be queued to the task system once the value of
+ /// the node is ready. This operation is lock free once the value is ready
+ /// before that node is uniquely locked while task is being added to
+ /// awaiting tasks
+ /// \param[in] ts task system
+ /// \param[in] task task awaiting for value. Function type must have
+ /// operator()()
+ template <typename Function>
+ void QueueOnFailure(gsl::not_null<TaskSystem*> const& ts, Function&& task) {
+ if (IsReady()) {
+ // The node is ready, so it won't fail any more.
+ return;
+ }
+ {
+ std::unique_lock ul{m_};
+ if (failed_) {
+ ts->QueueTask(std::forward<Function>(task));
+ }
+ else {
+ failure_tasks_.emplace_back(std::forward<Function>(task));
+ }
+ }
+ }
+
+ /// \brief Mark the node as failed and schedule the cleanup tasks.
+ /// \param[in] ts task system
+ void Fail(gsl::not_null<TaskSystem*> const& ts) {
+ std::unique_lock ul{m_};
+ if (IsReady()) {
+ // The node has a value already, so it can't be marked as failed any
+ // more
+ return;
+ }
+ if (failed_) {
+ // The was already marked as failed and the failure handled.
+ // So there is nothing more to do.
+ return;
+ }
+ failed_ = true;
+ // As the node will never become ready, we have to clean up all tasks
+ // and schedule the failure tasks.
+ for (auto& task : failure_tasks_) {
+ ts->QueueTask(std::move(task));
+ }
+ awaiting_tasks_.clear();
+ failure_tasks_.clear();
+ }
+
+ // Not thread safe, do not use unless the value has been already set
+ [[nodiscard]] auto GetValue() const& noexcept -> Value const& {
+ // Will only be checked in debug build
+ gsl_ExpectsAudit(value_.has_value());
+ return *value_;
+ }
+ [[nodiscard]] auto GetValue() && noexcept = delete;
+
+ [[nodiscard]] auto GetKey() const& noexcept -> Key const& { return key_; }
+ [[nodiscard]] auto GetKey() && noexcept -> Key { return std::move(key_); }
+
+ [[nodiscard]] auto IsReady() const noexcept -> bool {
+ return value_.has_value();
+ }
+
+ private:
+ Key key_;
+ std::optional<Value> value_{};
+ std::vector<Task> awaiting_tasks_{};
+ std::vector<Task> failure_tasks_{};
+ std::mutex m_{};
+ std::atomic<bool> is_queued_to_be_processed_{false};
+ bool failed_{false};
+
+ /// \brief Sets node as queued to be processed
+ /// \returns True if it was already queued to be processed, false
+ /// otherwise
+ /// Note: this is an atomic, lock-free operation
+ [[nodiscard]] auto GetAndMarkQueuedToBeProcessed() noexcept -> bool {
+ return std::atomic_exchange(&is_queued_to_be_processed_, true);
+ }
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_MULTITHREADING_ASYNC_MAP_NODE_HPP
diff --git a/src/buildtool/multithreading/notification_queue.hpp b/src/buildtool/multithreading/notification_queue.hpp
new file mode 100644
index 00000000..7e79aa43
--- /dev/null
+++ b/src/buildtool/multithreading/notification_queue.hpp
@@ -0,0 +1,188 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_MULTITHREADING_NOTIFICATION_QUEUE_HPP
+#define INCLUDED_SRC_BUILDTOOL_MULTITHREADING_NOTIFICATION_QUEUE_HPP
+
+#include <condition_variable>
+#include <deque>
+#include <mutex>
+#include <optional>
+#include <utility> // std::forward
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/multithreading/task.hpp"
+#include "src/utils/cpp/atomic.hpp"
+
+// Flag that can block the caller until it is set. Cannot be cleared after set.
+class WaitableOneWayFlag {
+ public:
+ // Clear flag. Essentially a noop, if it was set before.
+ void Clear() {
+ if (not was_set_) {
+ initial_ = false;
+ }
+ }
+
+ // Set flag. Essentially a noop, if it was set before.
+ void Set() {
+ if (not was_set_) {
+ was_set_ = true;
+ was_set_.notify_all();
+ }
+ }
+
+ // Blocks caller until it is set, if it was ever cleared.
+ void WaitForSet() {
+ if (not was_set_ and not initial_) {
+ was_set_.wait(false);
+ }
+ }
+
+ private:
+ atomic<bool> was_set_{};
+ bool initial_{true};
+};
+
+// Counter that can block the caller until it reaches zero.
+class WaitableZeroCounter {
+ enum class Status { Init, Wait, Reached };
+
+ public:
+ explicit WaitableZeroCounter(std::size_t init = 0) : count_{init} {}
+
+ // Essentially a noop, if count reached zero since last wait call.
+ void Decrement() {
+ if (status_ != Status::Reached and --count_ == 0) {
+ if (status_ == Status::Wait) {
+ status_ = Status::Reached;
+ status_.notify_all();
+ }
+ }
+ }
+
+ // Essentially a noop, if count reached zero since last wait call.
+ void Increment() {
+ if (status_ != Status::Reached) {
+ ++count_;
+ }
+ }
+
+ // Blocks caller until count reached zero, since last call to this method.
+ void WaitForZero() {
+ status_ = Status::Wait;
+ if (count_ != 0) {
+ status_.wait(Status::Wait);
+ }
+ status_ = Status::Reached;
+ }
+
+ private:
+ std::atomic<std::size_t> count_{};
+ atomic<Status> status_{Status::Init};
+};
+
+class NotificationQueue {
+ public:
+ NotificationQueue(gsl::not_null<WaitableOneWayFlag*> queues_read,
+ gsl::not_null<WaitableZeroCounter*> num_threads_running)
+ : queues_read_{std::move(queues_read)},
+ num_threads_running_{std::move(num_threads_running)} {}
+
+ NotificationQueue(NotificationQueue const& other) = delete;
+ NotificationQueue(NotificationQueue&& other) noexcept
+ : queue_{std::move(other.queue_)},
+ done_{other.done_},
+ queues_read_{std::move(other.queues_read_)},
+ num_threads_running_{std::move(other.num_threads_running_)} {}
+ ~NotificationQueue() = default;
+
+ [[nodiscard]] auto operator=(NotificationQueue const& other)
+ -> NotificationQueue& = delete;
+ [[nodiscard]] auto operator=(NotificationQueue&& other)
+ -> NotificationQueue& = delete;
+
+ // Blocks the thread until it's possible to pop or we are done.
+ // Note that the lock releases ownership of the mutex while waiting
+ // for the queue to have some element or for the notification queue
+ // state to be set to "done".
+ // Returns task popped or nullopt if no task was popped
+ [[nodiscard]] auto pop() -> std::optional<Task> {
+ std::unique_lock lock{mutex_};
+ auto there_is_something_to_pop_or_we_are_done = [&]() {
+ return !queue_.empty() || done_;
+ };
+ if (not there_is_something_to_pop_or_we_are_done()) {
+ num_threads_running_->Decrement();
+ ready_.wait(lock, there_is_something_to_pop_or_we_are_done);
+ num_threads_running_->Increment();
+ }
+
+ if (queue_.empty()) {
+ return std::nullopt;
+ }
+ auto t = std::move(queue_.front());
+ queue_.pop_front();
+ queues_read_->Set();
+ return t;
+ }
+
+ // Returns nullopt if the mutex is already locked or the queue is empty,
+ // otherwise pops the front element of the queue and returns it
+ [[nodiscard]] auto try_pop() -> std::optional<Task> {
+ std::unique_lock lock{mutex_, std::try_to_lock};
+ if (!lock || queue_.empty()) {
+ return std::nullopt;
+ }
+ auto t = std::move(queue_.front());
+ queue_.pop_front();
+ queues_read_->Set();
+ return t;
+ }
+
+ // Push task once the mutex is available (locking it until addition is
+ // finished)
+ template <typename FunctionType>
+ void push(FunctionType&& f) {
+ {
+ std::unique_lock lock{mutex_};
+ queue_.emplace_back(std::forward<FunctionType>(f));
+ }
+ queues_read_->Clear();
+ ready_.notify_one();
+ }
+
+ // Returns false if mutex is locked without pushing the task, pushes task
+ // and returns true otherwise
+ template <typename FunctionType>
+ [[nodiscard]] auto try_push(FunctionType&& f) -> bool {
+ {
+ std::unique_lock lock{mutex_, std::try_to_lock};
+ if (!lock) {
+ return false;
+ }
+ queue_.emplace_back(std::forward<FunctionType>(f));
+ }
+ queues_read_->Clear();
+ ready_.notify_one();
+ return true;
+ }
+
+ // Method to communicate to the notification queue that there will not be
+ // any more queries. Queries after calling this method are not guaratied to
+ // work as expected
+ void done() {
+ {
+ std::unique_lock lock{mutex_};
+ done_ = true;
+ }
+ ready_.notify_all();
+ }
+
+ private:
+ std::deque<Task> queue_{};
+ bool done_{false};
+ std::mutex mutex_{};
+ std::condition_variable ready_{};
+ gsl::not_null<WaitableOneWayFlag*> queues_read_;
+ gsl::not_null<WaitableZeroCounter*> num_threads_running_;
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_MULTITHREADING_NOTIFICATION_QUEUE_HPP
diff --git a/src/buildtool/multithreading/task.hpp b/src/buildtool/multithreading/task.hpp
new file mode 100644
index 00000000..49eb20a9
--- /dev/null
+++ b/src/buildtool/multithreading/task.hpp
@@ -0,0 +1,38 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_MULTITHREADING_TASK_HPP
+#define INCLUDED_SRC_BUILDTOOL_MULTITHREADING_TASK_HPP
+
+#include <functional>
+#include <type_traits>
+
+class Task {
+ public:
+ using TaskFunc = std::function<void()>;
+
+ Task() noexcept = default;
+
+ // NOLINTNEXTLINE(modernize-pass-by-value)
+ explicit Task(TaskFunc const& function) noexcept : f_{function} {}
+ explicit Task(TaskFunc&& function) noexcept : f_{std::move(function)} {}
+
+ void operator()() { f_(); }
+
+ // To be able to discern whether the internal f_ has been set or not,
+ // allowing us to write code such as:
+ /*
+ Task t;
+ while (!t) {
+ t = TryGetTaskFromQueue(); // (*)
+ }
+ t(); // (**)
+ */
+ // (*) does `return Task();` or `return {};` if queue is empty or locked)
+ // (**) we can now surely execute the task (and be sure it won't throw any
+ // exception) (for the sake of the example, imagine we are sure that the
+ // queue wasn't empty, otherwise this would be an infinite loop)
+ explicit operator bool() const noexcept { return f_.operator bool(); }
+
+ private:
+ TaskFunc f_{};
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_MULTITHREADING_TASK_HPP
diff --git a/src/buildtool/multithreading/task_system.cpp b/src/buildtool/multithreading/task_system.cpp
new file mode 100644
index 00000000..8c976a2f
--- /dev/null
+++ b/src/buildtool/multithreading/task_system.cpp
@@ -0,0 +1,56 @@
+#include "src/buildtool/multithreading/task_system.hpp"
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/multithreading/task.hpp"
+
+TaskSystem::TaskSystem() : TaskSystem(std::thread::hardware_concurrency()) {}
+
+TaskSystem::TaskSystem(std::size_t number_of_threads)
+ : thread_count_{std::max(1UL, number_of_threads)},
+ num_threads_running_{thread_count_} {
+ for (std::size_t index = 0; index < thread_count_; ++index) {
+ queues_.emplace_back(&queues_read_, &num_threads_running_);
+ }
+ for (std::size_t index = 0; index < thread_count_; ++index) {
+ threads_.emplace_back([&, index]() { Run(index); });
+ }
+}
+
+TaskSystem::~TaskSystem() {
+ // When starting a new task system all spawned threads will immediately go
+ // to sleep and wait for tasks. Even after adding some tasks, it can take a
+ // while until the first thread wakes up. Therefore, we first need to wait
+ // for the queues being read, before we can wait for all threads to become
+ // idle.
+ queues_read_.WaitForSet();
+ num_threads_running_.WaitForZero();
+ for (auto& q : queues_) {
+ q.done();
+ }
+ for (auto& t : threads_) {
+ t.join();
+ }
+}
+
+void TaskSystem::Run(std::size_t idx) {
+ gsl_Expects(thread_count_ > 0);
+
+ while (true) {
+ std::optional<Task> t{};
+ for (std::size_t i = 0; i < thread_count_; ++i) {
+ t = queues_[(idx + i) % thread_count_].try_pop();
+ if (t) {
+ break;
+ }
+ }
+
+ // NOLINTNEXTLINE(clang-analyzer-core.DivideZero)
+ t = t ? t : queues_[idx % thread_count_].pop();
+
+ if (!t) {
+ break;
+ }
+
+ (*t)();
+ }
+}
diff --git a/src/buildtool/multithreading/task_system.hpp b/src/buildtool/multithreading/task_system.hpp
new file mode 100644
index 00000000..c2e46779
--- /dev/null
+++ b/src/buildtool/multithreading/task_system.hpp
@@ -0,0 +1,65 @@
+#ifndef INCLUDED_SRC_BUILDTOOL_MULTITHREADING_TASK_SYSTEM_HPP
+#define INCLUDED_SRC_BUILDTOOL_MULTITHREADING_TASK_SYSTEM_HPP
+
+#include <algorithm>
+#include <atomic>
+#include <thread>
+#include <vector>
+
+#include "src/buildtool/multithreading/notification_queue.hpp"
+
+class TaskSystem {
+ public:
+ // Constructors create as many threads as specified (or
+ // std::thread::hardware_concurrency() many if not specified) running
+ // `TaskSystem::Run(index)` on them, where `index` is their position in
+ // `threads_`
+ TaskSystem();
+ explicit TaskSystem(std::size_t number_of_threads);
+
+ TaskSystem(TaskSystem const&) = delete;
+ TaskSystem(TaskSystem&&) = delete;
+ auto operator=(TaskSystem const&) -> TaskSystem& = delete;
+ auto operator=(TaskSystem &&) -> TaskSystem& = delete;
+
+ // Destructor calls sets to "done" all notification queues and joins the
+ // threads. Note that joining the threads will wait until the Run method
+ // they are running is finished
+ ~TaskSystem();
+
+ // Queue a task. Task will be added to the first notification queue that is
+ // found to be unlocked or, if none is found (after kNumberOfAttemps
+ // iterations), to the one in `index+1` position waiting until it's
+ // unlocked.
+ template <typename FunctionType>
+ void QueueTask(FunctionType&& f) noexcept {
+ auto idx = index_++;
+
+ for (std::size_t i = 0; i < thread_count_ * kNumberOfAttempts; ++i) {
+ if (queues_[(idx + i) % thread_count_].try_push(
+ std::forward<FunctionType>(f))) {
+ return;
+ }
+ }
+ queues_[idx % thread_count_].push(std::forward<FunctionType>(f));
+ }
+
+ [[nodiscard]] auto NumberOfThreads() const noexcept -> std::size_t {
+ return thread_count_;
+ }
+
+ private:
+ std::size_t const thread_count_{
+ std::max(1U, std::thread::hardware_concurrency())};
+ std::vector<std::thread> threads_{};
+ std::vector<NotificationQueue> queues_{};
+ std::atomic<std::size_t> index_{0};
+ WaitableOneWayFlag queues_read_{};
+ WaitableZeroCounter num_threads_running_{};
+
+ static constexpr std::size_t kNumberOfAttempts = 5;
+
+ void Run(std::size_t idx);
+};
+
+#endif // INCLUDED_SRC_BUILDTOOL_MULTITHREADING_TASK_SYSTEM_HPP
diff --git a/src/utils/TARGETS b/src/utils/TARGETS
new file mode 100644
index 00000000..9e26dfee
--- /dev/null
+++ b/src/utils/TARGETS
@@ -0,0 +1 @@
+{} \ No newline at end of file
diff --git a/src/utils/cpp/TARGETS b/src/utils/cpp/TARGETS
new file mode 100644
index 00000000..6b4347a2
--- /dev/null
+++ b/src/utils/cpp/TARGETS
@@ -0,0 +1,40 @@
+{ "hash_combine":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["hash_combine"]
+ , "hdrs": ["hash_combine.hpp"]
+ , "deps": [["@", "gsl-lite", "", "gsl-lite"]]
+ , "stage": ["src", "utils", "cpp"]
+ }
+, "type_safe_arithmetic":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["type_safe_arithmetic"]
+ , "hdrs": ["type_safe_arithmetic.hpp"]
+ , "deps": [["@", "gsl-lite", "", "gsl-lite"]]
+ , "stage": ["src", "utils", "cpp"]
+ }
+, "json":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["json"]
+ , "hdrs": ["json.hpp"]
+ , "deps": [["@", "json", "", "json"], ["@", "gsl-lite", "", "gsl-lite"]]
+ , "stage": ["src", "utils", "cpp"]
+ }
+, "concepts":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["concepts"]
+ , "hdrs": ["concepts.hpp"]
+ , "stage": ["src", "utils", "cpp"]
+ }
+, "atomic":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["atomic"]
+ , "hdrs": ["atomic.hpp"]
+ , "stage": ["src", "utils", "cpp"]
+ }
+, "hex_string":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["hex_string"]
+ , "hdrs": ["hex_string.hpp"]
+ , "stage": ["src", "utils", "cpp"]
+ }
+} \ No newline at end of file
diff --git a/src/utils/cpp/atomic.hpp b/src/utils/cpp/atomic.hpp
new file mode 100644
index 00000000..7f7631d0
--- /dev/null
+++ b/src/utils/cpp/atomic.hpp
@@ -0,0 +1,119 @@
+#ifndef INCLUDED_SRC_UTILS_CPP_ATOMIC_HPP
+#define INCLUDED_SRC_UTILS_CPP_ATOMIC_HPP
+
+#include <atomic>
+#include <condition_variable>
+#include <shared_mutex>
+
+// Atomic wrapper with notify/wait capabilities.
+// TODO(modernize): Replace any use this class by C++20's std::atomic<T>, once
+// libcxx adds support for notify_*() and wait().
+// [https://libcxx.llvm.org/docs/Cxx2aStatus.html]
+template <class T>
+class atomic {
+ public:
+ atomic() = default;
+ explicit atomic(T value) : value_{std::move(value)} {}
+ atomic(atomic const& other) = delete;
+ atomic(atomic&& other) = delete;
+ ~atomic() = default;
+
+ auto operator=(atomic const& other) -> atomic& = delete;
+ auto operator=(atomic&& other) -> atomic& = delete;
+ auto operator=(T desired) -> T { // NOLINT
+ std::shared_lock lock(mutex_);
+ value_ = desired;
+ return desired;
+ }
+ operator T() const { return static_cast<T>(value_); } // NOLINT
+
+ void store(T desired, std::memory_order order = std::memory_order_seq_cst) {
+ std::shared_lock lock(mutex_);
+ value_.store(std::move(desired), order);
+ }
+ [[nodiscard]] auto load(
+ std::memory_order order = std::memory_order_seq_cst) const -> T {
+ return value_.load(order);
+ }
+
+ template <class U = T, class = std::enable_if_t<std::is_integral_v<U>>>
+ auto operator++() -> T {
+ std::shared_lock lock(mutex_);
+ return ++value_;
+ }
+ template <class U = T, class = std::enable_if_t<std::is_integral_v<U>>>
+ [[nodiscard]] auto operator++(int) -> T {
+ std::shared_lock lock(mutex_);
+ return value_++;
+ }
+ template <class U = T, class = std::enable_if_t<std::is_integral_v<U>>>
+ auto operator--() -> T {
+ std::shared_lock lock(mutex_);
+ return --value_;
+ }
+ template <class U = T, class = std::enable_if_t<std::is_integral_v<U>>>
+ [[nodiscard]] auto operator--(int) -> T {
+ std::shared_lock lock(mutex_);
+ return value_--;
+ }
+
+ void notify_one() { cv_.notify_one(); }
+ void notify_all() { cv_.notify_all(); }
+ void wait(T old,
+ std::memory_order order = std::memory_order::seq_cst) const {
+ std::unique_lock lock(mutex_);
+ cv_.wait(lock,
+ [this, &old, order]() { return value_.load(order) != old; });
+ }
+
+ private:
+ std::atomic<T> value_{};
+ mutable std::shared_mutex mutex_{};
+ mutable std::condition_variable_any cv_{};
+};
+
+// Atomic shared_pointer with notify/wait capabilities.
+// TODO(modernize): Replace any use this class by C++20's
+// std::atomic<std::shared_ptr<T>>, once libcxx adds support for it.
+// [https://libcxx.llvm.org/docs/Cxx2aStatus.html]
+template <class T>
+class atomic_shared_ptr {
+ using ptr_t = std::shared_ptr<T>;
+
+ public:
+ atomic_shared_ptr() = default;
+ explicit atomic_shared_ptr(ptr_t value) : value_{std::move(value)} {}
+ atomic_shared_ptr(atomic_shared_ptr const& other) = delete;
+ atomic_shared_ptr(atomic_shared_ptr&& other) = delete;
+ ~atomic_shared_ptr() = default;
+
+ auto operator=(atomic_shared_ptr const& other)
+ -> atomic_shared_ptr& = delete;
+ auto operator=(atomic_shared_ptr&& other) -> atomic_shared_ptr& = delete;
+ auto operator=(ptr_t desired) -> ptr_t { // NOLINT
+ std::shared_lock lock(mutex_);
+ value_ = desired;
+ return desired;
+ }
+ operator ptr_t() const { value_; } // NOLINT
+
+ void store(ptr_t desired) {
+ std::shared_lock lock(mutex_);
+ value_ = std::move(desired);
+ }
+ [[nodiscard]] auto load() const -> ptr_t { return value_; }
+
+ void notify_one() { cv_.notify_one(); }
+ void notify_all() { cv_.notify_all(); }
+ void wait(ptr_t old) const {
+ std::unique_lock lock(mutex_);
+ cv_.wait(lock, [this, &old]() { return value_ != old; });
+ }
+
+ private:
+ ptr_t value_{};
+ mutable std::shared_mutex mutex_{};
+ mutable std::condition_variable_any cv_{};
+};
+
+#endif // INCLUDED_SRC_UTILS_CPP_ATOMIC_HPP
diff --git a/src/utils/cpp/concepts.hpp b/src/utils/cpp/concepts.hpp
new file mode 100644
index 00000000..92718b43
--- /dev/null
+++ b/src/utils/cpp/concepts.hpp
@@ -0,0 +1,55 @@
+#ifndef INCLUDED_SRC_UTILS_CPP_CONCEPTS_HPP
+#define INCLUDED_SRC_UTILS_CPP_CONCEPTS_HPP
+
+#include <string>
+#include <type_traits>
+
+// TODO(modernize): remove this once std::derived_from is shipped with libcxx
+template <class T, class U>
+concept derived_from = std::is_base_of_v<U, T>&&
+ std::is_convertible_v<const volatile T*, const volatile U*>;
+
+// TODO(modernize): remove this once std::same_as is shipped with libcxx
+template <class T, class U>
+concept same_as = std::is_same_v<T, U>and std::is_same_v<U, T>;
+
+template <class T>
+concept ContainsString = requires {
+ typename T::value_type;
+}
+and std::is_same_v<typename T::value_type, std::string>;
+
+template <class T>
+concept HasSize = requires(T const c) {
+ { c.size() }
+ ->same_as<std::size_t>; // TODO(modernize): replace by std::same_as
+};
+
+template <typename T>
+concept HasToString = requires(T const t) {
+ { t.ToString() }
+ ->same_as<std::string>; // TODO(modernize): replace by std::same_as
+};
+
+template <class T>
+concept InputIterableContainer = requires(T const c) {
+ { c.begin() }
+ ->same_as<typename T::const_iterator>; // TODO(modernize): replace by
+ // std::input_iterator
+ { c.end() }
+ ->same_as<typename T::const_iterator>; // TODO(modernize): replace by
+ // std::input_iterator
+};
+
+template <class T>
+concept OutputIterableContainer = InputIterableContainer<T>and requires(T c) {
+ { std::inserter(c, c.begin()) }
+ ->same_as<std::insert_iterator<T>>; // TODO(modernize): replace by
+ // std::output_iterator
+};
+
+template <class T>
+concept InputIterableStringContainer =
+ InputIterableContainer<T>and ContainsString<T>;
+
+#endif // INCLUDED_SRC_UTILS_CPP_CONCEPTS_HPP
diff --git a/src/utils/cpp/hash_combine.hpp b/src/utils/cpp/hash_combine.hpp
new file mode 100644
index 00000000..65c0c8ad
--- /dev/null
+++ b/src/utils/cpp/hash_combine.hpp
@@ -0,0 +1,15 @@
+#ifndef INCLUDED_SRC_UTILS_CPP_HASH_COMBINE_HPP
+#define INCLUDED_SRC_UTILS_CPP_HASH_COMBINE_HPP
+
+#include "gsl-lite/gsl-lite.hpp"
+
+// Taken from Boost, as hash_combine did not yet make it to STL.
+// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2017/p0814r0.pdf
+template <class T>
+inline auto hash_combine(gsl::not_null<std::size_t*> const& seed, T const& v)
+ -> void {
+ *seed ^=
+ std::hash<T>{}(v) + 0x9e3779b9 + (*seed << 6) + (*seed >> 2); // NOLINT
+}
+
+#endif
diff --git a/src/utils/cpp/hex_string.hpp b/src/utils/cpp/hex_string.hpp
new file mode 100644
index 00000000..86ea1b9e
--- /dev/null
+++ b/src/utils/cpp/hex_string.hpp
@@ -0,0 +1,19 @@
+#ifndef INCLUDED_SRC_UTILS_CPP_HEX_STRING_HPP
+#define INCLUDED_SRC_UTILS_CPP_HEX_STRING_HPP
+
+#include <iomanip>
+#include <sstream>
+#include <string>
+
+[[nodiscard]] static inline auto ToHexString(std::string const& bytes)
+ -> std::string {
+ std::ostringstream ss{};
+ ss << std::hex << std::setfill('0');
+ for (auto const& b : bytes) {
+ ss << std::setw(2)
+ << static_cast<int>(static_cast<unsigned char const>(b));
+ }
+ return ss.str();
+}
+
+#endif // INCLUDED_SRC_UTILS_CPP_HEX_STRING_HPP
diff --git a/src/utils/cpp/json.hpp b/src/utils/cpp/json.hpp
new file mode 100644
index 00000000..8945e975
--- /dev/null
+++ b/src/utils/cpp/json.hpp
@@ -0,0 +1,83 @@
+#ifndef INCLUDED_SRC_UTILS_CPP_JSON_HPP
+#define INCLUDED_SRC_UTILS_CPP_JSON_HPP
+
+#include <algorithm>
+#include <optional>
+#include <sstream>
+#include <string>
+
+#include "nlohmann/json.hpp"
+#include "gsl-lite/gsl-lite.hpp"
+
+template <typename ValueT>
+auto ExtractValueAs(
+ nlohmann::json const& j,
+ std::string const& key,
+ std::function<void(std::string const& error)>&& logger =
+ [](std::string const & /*unused*/) -> void {}) noexcept
+ -> std::optional<ValueT> {
+ try {
+ auto it = j.find(key);
+ if (it == j.end()) {
+ logger("key " + key + " cannot be found in JSON object");
+ return std::nullopt;
+ }
+ return it.value().template get<ValueT>();
+ } catch (std::exception& e) {
+ logger(e.what());
+ return std::nullopt;
+ }
+}
+
+namespace detail {
+
+[[nodiscard]] static inline auto IndentListsOnlyUntilDepth(
+ nlohmann::json const& json,
+ std::string const& indent,
+ std::size_t until,
+ std::size_t depth) -> std::string {
+ using iterator = std::ostream_iterator<std::string>;
+ if (json.is_object()) {
+ std::size_t i{};
+ std::ostringstream oss{};
+ oss << '{' << std::endl;
+ for (auto const& [key, value] : json.items()) {
+ std::fill_n(iterator{oss}, depth + 1, indent);
+ oss << nlohmann::json(key).dump() << ": "
+ << IndentListsOnlyUntilDepth(value, indent, until, depth + 1)
+ << (++i == json.size() ? "" : ",") << std::endl;
+ }
+ std::fill_n(iterator{oss}, depth, indent);
+ oss << '}';
+ gsl_EnsuresAudit(nlohmann::json::parse(oss.str()) == json);
+ return oss.str();
+ }
+ if (json.is_array() and depth < until) {
+ std::size_t i{};
+ std::ostringstream oss{};
+ oss << '[' << std::endl;
+ for (auto const& value : json) {
+ std::fill_n(iterator{oss}, depth + 1, indent);
+ oss << IndentListsOnlyUntilDepth(value, indent, until, depth + 1)
+ << (++i == json.size() ? "" : ",") << std::endl;
+ }
+ std::fill_n(iterator{oss}, depth, indent);
+ oss << ']';
+ gsl_EnsuresAudit(nlohmann::json::parse(oss.str()) == json);
+ return oss.str();
+ }
+ return json.dump();
+}
+
+} // namespace detail
+
+/// \brief Dump json with indent. Indent lists only until specified depth.
+[[nodiscard]] static inline auto IndentListsOnlyUntilDepth(
+ nlohmann::json const& json,
+ std::size_t indent,
+ std::size_t until_depth = 0) -> std::string {
+ return detail::IndentListsOnlyUntilDepth(
+ json, std::string(indent, ' '), until_depth, 0);
+}
+
+#endif // INCLUDED_SRC_UTILS_CPP_JSON_HPP
diff --git a/src/utils/cpp/type_safe_arithmetic.hpp b/src/utils/cpp/type_safe_arithmetic.hpp
new file mode 100644
index 00000000..21bba0b5
--- /dev/null
+++ b/src/utils/cpp/type_safe_arithmetic.hpp
@@ -0,0 +1,197 @@
+#ifndef INCLUDED_SRC_UTILS_CPP_TYPE_SAFE_ARITHMETIC_HPP
+#define INCLUDED_SRC_UTILS_CPP_TYPE_SAFE_ARITHMETIC_HPP
+
+#include <limits>
+#include <type_traits>
+
+#include "gsl-lite/gsl-lite.hpp"
+
+/// \struct type_safe_arithmetic_tag
+/// \brief Abstract tag defining types and limits for custom arithmetic types.
+/// Usage example:
+/// struct my_type_tag : type_safe_arithmetic_tag<int, -2, +3> {};
+/// using my_type_t = type_safe_arithmetic<my_type_tag>;
+template <typename T,
+ T MIN_VALUE = std::numeric_limits<T>::lowest(),
+ T MAX_VALUE = std::numeric_limits<T>::max(),
+ T SMALLEST_VALUE = std::numeric_limits<T>::min()>
+struct type_safe_arithmetic_tag {
+ static_assert(std::is_arithmetic<T>::value,
+ "T must be an arithmetic type (integer or floating-point)");
+
+ using value_t = T;
+ using reference_t = T&;
+ using const_reference_t = T const&;
+ using pointer_t = T*;
+ using const_pointer_t = T const*;
+
+ static constexpr value_t max_value = MAX_VALUE;
+ static constexpr value_t min_value = MIN_VALUE;
+ static constexpr value_t smallest_value = SMALLEST_VALUE;
+};
+
+/// \class type_safe_arithmetic
+/// \brief Abstract class for defining custom arithmetic types.
+/// \tparam TAG The actual \ref type_safe_arithmetic_tag
+template <typename TAG>
+class type_safe_arithmetic {
+ typename TAG::value_t m_value{};
+
+ public:
+ using tag_t = TAG;
+ using value_t = typename tag_t::value_t;
+ using reference_t = typename tag_t::reference_t;
+ using const_reference_t = typename tag_t::const_reference_t;
+ using pointer_t = typename tag_t::pointer_t;
+ using const_pointer_t = typename tag_t::const_pointer_t;
+
+ static constexpr value_t max_value = tag_t::max_value;
+ static constexpr value_t min_value = tag_t::min_value;
+ static constexpr value_t smallest_value = tag_t::smallest_value;
+
+ constexpr type_safe_arithmetic() = default;
+
+ // NOLINTNEXTLINE
+ constexpr /*explicit*/ type_safe_arithmetic(value_t value) { set(value); }
+
+ type_safe_arithmetic(type_safe_arithmetic const&) = default;
+ type_safe_arithmetic(type_safe_arithmetic&&) noexcept = default;
+ auto operator=(type_safe_arithmetic const&)
+ -> type_safe_arithmetic& = default;
+ auto operator=(type_safe_arithmetic&&) noexcept
+ -> type_safe_arithmetic& = default;
+ ~type_safe_arithmetic() = default;
+
+ auto operator=(value_t value) -> type_safe_arithmetic& {
+ set(value);
+ return *this;
+ }
+
+ // NOLINTNEXTLINE
+ constexpr /*explicit*/ operator value_t() const { return m_value; }
+
+ constexpr auto get() const -> value_t { return m_value; }
+
+ constexpr void set(value_t value) {
+ gsl_Expects(value >= min_value && value <= max_value &&
+ "value output of range");
+ m_value = value;
+ }
+
+ auto pointer() const -> const_pointer_t { return &m_value; }
+};
+
+// template <typename TAG>
+// bool operator==(type_safe_arithmetic<TAG> lhs, type_safe_arithmetic<TAG> rhs)
+// {
+// return lhs.get() == rhs.get();
+// }
+//
+// template <typename TAG>
+// bool operator!=(type_safe_arithmetic<TAG> lhs, type_safe_arithmetic<TAG> rhs)
+// {
+// return !(lhs == rhs);
+// }
+//
+// template <typename TAG>
+// bool operator>(type_safe_arithmetic<TAG> lhs, type_safe_arithmetic<TAG> rhs)
+// {
+// return lhs.get() > rhs.get();
+// }
+//
+// template <typename TAG>
+// bool operator>=(type_safe_arithmetic<TAG> lhs, type_safe_arithmetic<TAG> rhs)
+// {
+// return lhs.get() >= rhs.get();
+// }
+//
+// template <typename TAG>
+// bool operator<(type_safe_arithmetic<TAG> lhs, type_safe_arithmetic<TAG> rhs)
+// {
+// return lhs.get() < rhs.get();
+// }
+//
+// template <typename TAG>
+// bool operator<=(type_safe_arithmetic<TAG> lhs, type_safe_arithmetic<TAG> rhs)
+// {
+// return lhs.get() <= rhs.get();
+// }
+//
+// template <typename TAG>
+// type_safe_arithmetic<TAG> operator+(type_safe_arithmetic<TAG> lhs,
+// type_safe_arithmetic<TAG> rhs) {
+// return type_safe_arithmetic<TAG>{lhs.get() + rhs.get()};
+// }
+
+template <typename TAG>
+auto operator+=(type_safe_arithmetic<TAG>& lhs, type_safe_arithmetic<TAG> rhs)
+ -> type_safe_arithmetic<TAG>& {
+ lhs.set(lhs.get() + rhs.get());
+ return lhs;
+}
+
+// template <typename TAG>
+// type_safe_arithmetic<TAG> operator-(type_safe_arithmetic<TAG> lhs,
+// type_safe_arithmetic<TAG> rhs) {
+// return type_safe_arithmetic<TAG>{lhs.get() - rhs.get()};
+// }
+//
+// template <typename TAG>
+// type_safe_arithmetic<TAG>& operator-=(type_safe_arithmetic<TAG>& lhs,
+// type_safe_arithmetic<TAG> rhs) {
+// lhs.set(lhs.get() - rhs.get());
+// return lhs;
+// }
+//
+// template <typename TAG>
+// type_safe_arithmetic<TAG> operator*(type_safe_arithmetic<TAG> lhs,
+// typename TAG::value_t rhs) {
+// return type_safe_arithmetic<TAG>{lhs.get() - rhs};
+// }
+//
+// template <typename TAG>
+// type_safe_arithmetic<TAG>& operator*=(type_safe_arithmetic<TAG>& lhs,
+// typename TAG::value_t rhs) {
+// lhs.set(lhs.get() * rhs);
+// return lhs;
+// }
+//
+// template <typename TAG>
+// type_safe_arithmetic<TAG> operator/(type_safe_arithmetic<TAG> lhs,
+// typename TAG::value_t rhs) {
+// return type_safe_arithmetic<TAG>{lhs.get() / rhs};
+// }
+//
+// template <typename TAG>
+// type_safe_arithmetic<TAG>& operator/=(type_safe_arithmetic<TAG>& lhs,
+// typename TAG::value_t rhs) {
+// lhs.set(lhs.get() / rhs);
+// return lhs;
+// }
+//
+// template <typename TAG>
+// type_safe_arithmetic<TAG>& operator++(type_safe_arithmetic<TAG>& a) {
+// return a += type_safe_arithmetic<TAG>{1};
+// }
+
+template <typename TAG>
+auto operator++(type_safe_arithmetic<TAG>& a, int)
+ -> type_safe_arithmetic<TAG> {
+ auto r = a;
+ a += type_safe_arithmetic<TAG>{1};
+ return r;
+}
+
+// template <typename TAG>
+// type_safe_arithmetic<TAG>& operator--(type_safe_arithmetic<TAG>& a) {
+// return a -= type_safe_arithmetic<TAG>{1};
+// }
+//
+// template <typename TAG>
+// type_safe_arithmetic<TAG> operator--(type_safe_arithmetic<TAG>& a, int) {
+// auto r = a;
+// a += type_safe_arithmetic<TAG>{1};
+// return r;
+// }
+
+#endif // INCLUDED_SRC_UTILS_CPP_TYPE_SAFE_ARITHMETIC_HPP
diff --git a/test/TARGETS b/test/TARGETS
new file mode 100644
index 00000000..ed206b74
--- /dev/null
+++ b/test/TARGETS
@@ -0,0 +1,24 @@
+{ "catch-main":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["catch-main"]
+ , "srcs": ["main.cpp"]
+ , "deps": [["@", "catch2", "", "catch2"], ["test/utils", "log_config"]]
+ , "stage": ["test"]
+ }
+, "TESTS":
+ { "type": "install"
+ , "tainted": ["test"]
+ , "dirs":
+ [ [["./", "buildtool", "TESTS"], "buildtool"]
+ , [["./", "utils", "TESTS"], "utils"]
+ ]
+ }
+, "ALL":
+ { "type": ["@", "rules", "CC", "configure"]
+ , "tainted": ["test"]
+ , "arguments_config": ["OS", "ARCH"]
+ , "os": [{"type": "var", "name": "OS", "default": "linux"}]
+ , "arch": [{"type": "var", "name": "ARCH", "default": "x86_64"}]
+ , "target": ["TESTS"]
+ }
+} \ No newline at end of file
diff --git a/test/buildtool/TARGETS b/test/buildtool/TARGETS
new file mode 100644
index 00000000..084adec6
--- /dev/null
+++ b/test/buildtool/TARGETS
@@ -0,0 +1,15 @@
+{ "TESTS":
+ { "type": "install"
+ , "tainted": ["test"]
+ , "dirs":
+ [ [["./", "common", "TESTS"], "common"]
+ , [["./", "crypto", "TESTS"], "crypto"]
+ , [["./", "execution_api", "TESTS"], "execution_api"]
+ , [["./", "execution_engine", "TESTS"], "execution_engine"]
+ , [["./", "file_system", "TESTS"], "file_system"]
+ , [["./", "graph_traverser", "TESTS"], "graph_traverser"]
+ , [["./", "logging", "TESTS"], "logging"]
+ , [["./", "multithreading", "TESTS"], "multithreading"]
+ ]
+ }
+} \ No newline at end of file
diff --git a/test/buildtool/build_engine/base_maps/TARGETS b/test/buildtool/build_engine/base_maps/TARGETS
new file mode 100644
index 00000000..042ca0b3
--- /dev/null
+++ b/test/buildtool/build_engine/base_maps/TARGETS
@@ -0,0 +1,127 @@
+{ "test_repo":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["test_repo"]
+ , "hdrs": ["test_repo.hpp"]
+ , "deps":
+ [ ["src/buildtool/common", "config"]
+ , ["src/buildtool/file_system", "file_system_manager"]
+ ]
+ , "stage": ["test", "buildtool", "build_engine", "base_maps"]
+ }
+, "entity_name":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["entity_name"]
+ , "srcs": ["entity_name.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/build_engine/base_maps", "entity_name"]
+ ]
+ , "stage": ["test", "buildtool", "build_engine", "base_maps"]
+ }
+, "directory_map":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["directory_map"]
+ , "srcs": ["directory_map.test.cpp"]
+ , "data": ["test_data_src"]
+ , "deps":
+ [ "test_repo"
+ , ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/common", "config"]
+ , ["src/buildtool/build_engine/base_maps", "directory_map"]
+ ]
+ , "stage": ["test", "buildtool", "build_engine", "base_maps"]
+ }
+, "json_file_map":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["json_file_map"]
+ , "srcs": ["json_file_map.test.cpp"]
+ , "data": ["test_data_json"]
+ , "deps":
+ [ "test_repo"
+ , ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/build_engine/base_maps", "json_file_map"]
+ ]
+ , "stage": ["test", "buildtool", "build_engine", "base_maps"]
+ }
+, "source_map":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["source_map"]
+ , "srcs": ["source_map.test.cpp"]
+ , "data": ["test_data"]
+ , "deps":
+ [ "test_repo"
+ , ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/build_engine/base_maps", "directory_map"]
+ , ["src/buildtool/build_engine/base_maps", "source_map"]
+ ]
+ , "stage": ["test", "buildtool", "build_engine", "base_maps"]
+ }
+, "expression_map":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["expression_map"]
+ , "srcs": ["expression_map.test.cpp"]
+ , "data": ["test_data_expr"]
+ , "deps":
+ [ "test_repo"
+ , ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/build_engine/base_maps", "expression_map"]
+ ]
+ , "stage": ["test", "buildtool", "build_engine", "base_maps"]
+ }
+, "rule_map":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["rule_map"]
+ , "srcs": ["rule_map.test.cpp"]
+ , "data": ["test_data_rule"]
+ , "deps":
+ [ "test_repo"
+ , ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/build_engine/base_maps", "rule_map"]
+ ]
+ , "stage": ["test", "buildtool", "build_engine", "base_maps"]
+ }
+, "test_data_src":
+ { "type": ["@", "rules", "data", "staged"]
+ , "srcs": ["data_src/file", "data_src/foo/bar/file"]
+ , "stage": ["test", "buildtool", "build_engine", "base_maps"]
+ }
+, "test_data_json":
+ { "type": ["@", "rules", "data", "staged"]
+ , "srcs": ["data_json/bad.json", "data_json/foo.json"]
+ , "stage": ["test", "buildtool", "build_engine", "base_maps"]
+ }
+, "test_data_expr":
+ { "type": ["@", "rules", "data", "staged"]
+ , "srcs": ["data_expr/EXPRESSIONS", "data_expr/readers/EXPRESSIONS"]
+ , "stage": ["test", "buildtool", "build_engine", "base_maps"]
+ }
+, "test_data_rule":
+ { "type": ["@", "rules", "data", "staged"]
+ , "srcs": ["data_rule/RULES", "data_rule/composers/EXPRESSIONS"]
+ , "stage": ["test", "buildtool", "build_engine", "base_maps"]
+ }
+, "test_data":
+ { "type": ["@", "rules", "data", "staged"]
+ , "srcs":
+ ["test_data_src", "test_data_json", "test_data_expr", "test_data_rule"]
+ , "stage": ["test", "buildtool", "build_engine", "base_maps"]
+ }
+, "TESTS":
+ { "type": "install"
+ , "tainted": ["test"]
+ , "deps":
+ [ "directory_map"
+ , "entity_name"
+ , "expression_map"
+ , "json_file_map"
+ , "rule_map"
+ , "source_map"
+ ]
+ }
+} \ No newline at end of file
diff --git a/test/buildtool/build_engine/base_maps/data/test_repo.bundle b/test/buildtool/build_engine/base_maps/data/test_repo.bundle
new file mode 100644
index 00000000..06a38340
--- /dev/null
+++ b/test/buildtool/build_engine/base_maps/data/test_repo.bundle
Binary files differ
diff --git a/test/buildtool/build_engine/base_maps/data_expr/EXPRESSIONS b/test/buildtool/build_engine/base_maps/data_expr/EXPRESSIONS
new file mode 100644
index 00000000..06ac3800
--- /dev/null
+++ b/test/buildtool/build_engine/base_maps/data_expr/EXPRESSIONS
@@ -0,0 +1,66 @@
+{
+ "test_expression_literal": {
+ "expression": "foo"
+ },
+ "test_read_vars": {
+ "vars": ["FOO"],
+ "expression": {
+ "type": "var",
+ "name": "FOO"
+ }
+ },
+ "test_call_import": {
+ "vars": ["FOO"],
+ "imports": {
+ "read_foo": [
+ "readers",
+ "real_foo_reader"
+ ]
+ },
+ "expression": {
+ "type": "CALL_EXPRESSION",
+ "name": "read_foo"
+ }
+ },
+ "test_overwrite_import": {
+ "vars": ["FOO"],
+ "imports": {
+ "read_foo": [
+ "readers",
+ "proxy_foo_reader"
+ ]
+ },
+ "expression": {
+ "type": "CALL_EXPRESSION",
+ "name": "read_foo"
+ }
+ },
+ "test_missing_vars": {
+ "expression": {
+ "type": "var",
+ "name": "FOO"
+ }
+ },
+ "test_missing_imports": {
+ "expression": {
+ "type": "CALL_EXPRESSION",
+ "name": "read_foo"
+ }
+ },
+ "test_malformed_function": "not_an_object",
+ "test_malformed_expression": {
+ "missing_expression": {}
+ },
+ "test_malformed_vars": {
+ "vars": "not_a_list",
+ "expression": {
+ "type": "empty_map"
+ }
+ },
+ "test_malformed_imports": {
+ "imports": "not_an_object",
+ "expression": {
+ "type": "empty_map"
+ }
+ }
+}
diff --git a/test/buildtool/build_engine/base_maps/data_expr/readers/EXPRESSIONS b/test/buildtool/build_engine/base_maps/data_expr/readers/EXPRESSIONS
new file mode 100644
index 00000000..9ad388ed
--- /dev/null
+++ b/test/buildtool/build_engine/base_maps/data_expr/readers/EXPRESSIONS
@@ -0,0 +1,23 @@
+{
+ "proxy_foo_reader": {
+ "vars": [
+ "FOO"
+ ],
+ "imports": {
+ "read_foo": "real_foo_reader"
+ },
+ "expression": {
+ "type": "CALL_EXPRESSION",
+ "name": "read_foo"
+ }
+ },
+ "real_foo_reader": {
+ "vars": [
+ "FOO"
+ ],
+ "expression": {
+ "type": "var",
+ "name": "FOO"
+ }
+ }
+}
diff --git a/test/buildtool/build_engine/base_maps/data_json/bad.json b/test/buildtool/build_engine/base_maps/data_json/bad.json
new file mode 100644
index 00000000..5b157d08
--- /dev/null
+++ b/test/buildtool/build_engine/base_maps/data_json/bad.json
@@ -0,0 +1 @@
+This is not JSON
diff --git a/test/buildtool/build_engine/base_maps/data_json/foo.json b/test/buildtool/build_engine/base_maps/data_json/foo.json
new file mode 100644
index 00000000..c8c4105e
--- /dev/null
+++ b/test/buildtool/build_engine/base_maps/data_json/foo.json
@@ -0,0 +1,3 @@
+{
+ "foo": "bar"
+}
diff --git a/test/buildtool/build_engine/base_maps/data_rule/RULES b/test/buildtool/build_engine/base_maps/data_rule/RULES
new file mode 100644
index 00000000..ab25016f
--- /dev/null
+++ b/test/buildtool/build_engine/base_maps/data_rule/RULES
@@ -0,0 +1,232 @@
+{
+ "test_empty_rule": {
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_rule_fields": {
+ "string_fields": ["foo"],
+ "target_fields": ["bar"],
+ "config_fields": ["baz"],
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_config_transitions_target_via_field": {
+ "target_fields": ["target"],
+ "config_transitions": {
+ "target": [{
+ "type": "empty_map"
+ }]
+ },
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_config_transitions_target_via_implicit": {
+ "implicit": {
+ "target": [
+ ["module", "name"]
+ ]
+ },
+ "config_transitions": {
+ "target": [{
+ "type": "empty_map"
+ }]
+ },
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_config_transitions_canonicalness": {
+ "target_fields": ["foo", "bar"],
+ "string_fields": ["quux", "corge"],
+ "config_fields": ["grault", "garply"],
+ "implicit": {
+ "baz": [
+ ["module", "name"]
+ ],
+ "qux": [
+ ["module", "name"]
+ ]
+ },
+ "config_transitions": {
+ "bar": [{
+ "type": "singleton_map",
+ "key": "exists",
+ "value": true
+ }],
+ "qux": [{
+ "type": "singleton_map",
+ "key": "defined",
+ "value": true
+ }]
+ },
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_call_import": {
+ "config_vars": ["FOO"],
+ "imports": {
+ "compose_foo": [
+ "composers",
+ "foo_composer"
+ ]
+ },
+ "expression": {
+ "type": "CALL_EXPRESSION",
+ "name": "compose_foo"
+ }
+ },
+ "test_string_kw_conflict": {
+ "string_fields": ["foo", "type", "bar"],
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_target_kw_conflict": {
+ "target_fields": ["foo", "arguments_config", "bar"],
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_config_kw_conflict": {
+ "config_fields": ["foo", "type", "bar"],
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_implicit_kw_conflict": {
+ "implicit": {
+ "foo": [],
+ "arguments_config": [],
+ "bar": []
+ },
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_string_target_conflict": {
+ "string_fields": ["foo", "bar"],
+ "target_fields": ["bar", "baz"],
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_target_config_conflict": {
+ "target_fields": ["foo", "bar"],
+ "config_fields": ["bar", "baz"],
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_config_implicit_conflict": {
+ "config_fields": ["foo", "bar"],
+ "implicit": {
+ "bar": [
+ ["module", "name"]
+ ],
+ "baz": [
+ ["module", "name"]
+ ]
+ },
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_unknown_config_transitions_target": {
+ "config_transitions": {
+ "missing": [{
+ "type": "empty_map"
+ }]
+ },
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_missing_config_vars": {
+ "imports": {
+ "compose_foo": [
+ "composers",
+ "foo_composer"
+ ]
+ },
+ "expression": {
+ "type": "CALL_EXPRESSION",
+ "name": "compose_foo"
+ }
+ },
+ "test_missing_imports": {
+ "expression": {
+ "type": "CALL_EXPRESSION",
+ "name": "compose_foo"
+ }
+ },
+ "test_malformed_rule": "not_an_object",
+ "test_malformed_rule_expression": {
+ "missing_expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_malformed_target_fields": {
+ "target_fields": "not_a_list",
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_malformed_string_fields": {
+ "string_fields": "not_a_list",
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_malformed_config_fields": {
+ "config_fields": "not_a_list",
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_malformed_implicit": {
+ "implicit": "not_an_object",
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_malformed_implicit_entry": {
+ "implicit": {
+ "target": "not_a_list"
+ },
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_malformed_implicit_entity_name": {
+ "implicit": {
+ "target": [
+ ["module_without_name"]
+ ]
+ },
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_malformed_config_vars": {
+ "config_vars": "not_a_list",
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_malformed_config_transitions": {
+ "config_transitions": "not_an_object",
+ "expression": {
+ "type": "RESULT"
+ }
+ },
+ "test_malformed_imports": {
+ "imports": "not_an_object",
+ "expression": {
+ "type": "RESULT"
+ }
+ }
+}
diff --git a/test/buildtool/build_engine/base_maps/data_rule/composers/EXPRESSIONS b/test/buildtool/build_engine/base_maps/data_rule/composers/EXPRESSIONS
new file mode 100644
index 00000000..b5ca8cb8
--- /dev/null
+++ b/test/buildtool/build_engine/base_maps/data_rule/composers/EXPRESSIONS
@@ -0,0 +1,28 @@
+{
+ "foo_composer": {
+ "vars": [
+ "FOO"
+ ],
+ "expression": {
+ "type": "map_union",
+ "$1": [{
+ "type": "singleton_map",
+ "key": "type",
+ "value": "RESULT"
+ },
+ {
+ "type": "singleton_map",
+ "key": "artifacts",
+ "value": {
+ "type": "singleton_map",
+ "key": "foo",
+ "value": {
+ "type": "var",
+ "name": "FOO"
+ }
+ }
+ }
+ ]
+ }
+ }
+}
diff --git a/test/buildtool/build_engine/base_maps/data_src/file b/test/buildtool/build_engine/base_maps/data_src/file
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/buildtool/build_engine/base_maps/data_src/file
diff --git a/test/buildtool/build_engine/base_maps/data_src/foo/bar/file b/test/buildtool/build_engine/base_maps/data_src/foo/bar/file
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/buildtool/build_engine/base_maps/data_src/foo/bar/file
diff --git a/test/buildtool/build_engine/base_maps/directory_map.test.cpp b/test/buildtool/build_engine/base_maps/directory_map.test.cpp
new file mode 100644
index 00000000..20ddae06
--- /dev/null
+++ b/test/buildtool/build_engine/base_maps/directory_map.test.cpp
@@ -0,0 +1,87 @@
+#include <filesystem>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/build_engine/base_maps/directory_map.hpp"
+#include "src/buildtool/common/repository_config.hpp"
+#include "src/buildtool/multithreading/task_system.hpp"
+#include "test/buildtool/build_engine/base_maps/test_repo.hpp"
+
+namespace {
+
+using namespace BuildMaps::Base; // NOLINT
+
+void SetupConfig(bool use_git) {
+ auto root = FileRoot{kBasePath / "data_src"};
+ if (use_git) {
+ auto repo_path = CreateTestRepo();
+ REQUIRE(repo_path);
+ auto git_root = FileRoot::FromGit(*repo_path, kSrcTreeId);
+ REQUIRE(git_root);
+ root = std::move(*git_root);
+ }
+ RepositoryConfig::Instance().Reset();
+ RepositoryConfig::Instance().SetInfo(
+ "", RepositoryConfig::RepositoryInfo{root});
+}
+
+auto ReadDirectory(ModuleName const& id,
+ DirectoryEntriesMap::Consumer value_checker,
+ bool use_git = false) -> bool {
+ SetupConfig(use_git);
+ auto data_direntries = CreateDirectoryEntriesMap();
+ bool success{true};
+ {
+ TaskSystem ts;
+ data_direntries.ConsumeAfterKeysReady(
+ &ts,
+ {id},
+ std::move(value_checker),
+ [&success](std::string const& /*unused*/, bool /*unused*/) {
+ success = false;
+ });
+ }
+ return success;
+}
+
+} // namespace
+
+TEST_CASE("simple usage") {
+ bool as_expected{false};
+ auto name = ModuleName{"", "."};
+ auto consumer = [&as_expected](auto values) {
+ if (values[0]->Contains("file") &&
+ not values[0]->Contains("does_not_exist")) {
+ as_expected = true;
+ };
+ };
+
+ SECTION("via file") {
+ CHECK(ReadDirectory(name, consumer, /*use_git=*/false));
+ CHECK(as_expected);
+ }
+
+ SECTION("via git tree") {
+ CHECK(ReadDirectory(name, consumer, /*use_git=*/true));
+ CHECK(as_expected);
+ }
+}
+
+TEST_CASE("missing directory") {
+ bool as_expected{false};
+ auto name = ModuleName{"", "does_not_exist"};
+ auto consumer = [&as_expected](auto values) {
+ if (values[0]->Empty()) {
+ as_expected = true;
+ }
+ };
+
+ SECTION("via file") {
+ CHECK(ReadDirectory(name, consumer, /*use_git=*/false));
+ CHECK(as_expected);
+ }
+
+ SECTION("via git tree") {
+ CHECK(ReadDirectory(name, consumer, /*use_git=*/true));
+ CHECK(as_expected);
+ }
+}
diff --git a/test/buildtool/build_engine/base_maps/entity_name.test.cpp b/test/buildtool/build_engine/base_maps/entity_name.test.cpp
new file mode 100644
index 00000000..ddcea10b
--- /dev/null
+++ b/test/buildtool/build_engine/base_maps/entity_name.test.cpp
@@ -0,0 +1,22 @@
+#include "catch2/catch.hpp"
+#include "src/buildtool/build_engine/base_maps/entity_name.hpp"
+
+TEST_CASE("Normal module names") {
+ using EN = BuildMaps::Base::EntityName;
+ CHECK(EN::normal_module_name("foo/bar") == "foo/bar");
+ CHECK(EN::normal_module_name("foo/bar/") == "foo/bar");
+ CHECK(EN::normal_module_name("./foo/bar") == "foo/bar");
+ CHECK(EN::normal_module_name("/foo/bar") == "foo/bar");
+ CHECK(EN::normal_module_name("/foo/bar/.") == "foo/bar");
+ CHECK(EN::normal_module_name("/foo/bar/baz/..") == "foo/bar");
+ CHECK(EN::normal_module_name("foo/baz/../bar") == "foo/bar");
+ CHECK(EN::normal_module_name("../../../foo/bar") == "foo/bar");
+
+ CHECK(EN::normal_module_name("").empty());
+ CHECK(EN::normal_module_name(".").empty());
+ CHECK(EN::normal_module_name("./").empty());
+ CHECK(EN::normal_module_name("./.").empty());
+ CHECK(EN::normal_module_name("/").empty());
+ CHECK(EN::normal_module_name("/.").empty());
+ CHECK(EN::normal_module_name("..").empty());
+}
diff --git a/test/buildtool/build_engine/base_maps/expression_map.test.cpp b/test/buildtool/build_engine/base_maps/expression_map.test.cpp
new file mode 100644
index 00000000..e8068475
--- /dev/null
+++ b/test/buildtool/build_engine/base_maps/expression_map.test.cpp
@@ -0,0 +1,208 @@
+#include <filesystem>
+#include <functional>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/build_engine/base_maps/expression_map.hpp"
+#include "src/buildtool/build_engine/base_maps/json_file_map.hpp"
+#include "src/buildtool/common/repository_config.hpp"
+#include "src/buildtool/multithreading/task_system.hpp"
+#include "test/buildtool/build_engine/base_maps/test_repo.hpp"
+
+namespace {
+
+using namespace BuildMaps::Base; // NOLINT
+
+void SetupConfig(bool use_git) {
+ auto root = FileRoot{kBasePath / "data_expr"};
+ if (use_git) {
+ auto repo_path = CreateTestRepo();
+ REQUIRE(repo_path);
+ auto git_root = FileRoot::FromGit(*repo_path, kExprTreeId);
+ REQUIRE(git_root);
+ root = std::move(*git_root);
+ }
+ RepositoryConfig::Instance().Reset();
+ RepositoryConfig::Instance().SetInfo(
+ "", RepositoryConfig::RepositoryInfo{root});
+}
+
+auto ReadExpressionFunction(EntityName const& id,
+ ExpressionFunctionMap::Consumer value_checker,
+ bool use_git = false) -> bool {
+ SetupConfig(use_git);
+ auto expr_file_map = CreateExpressionFileMap(0);
+ auto expr_func_map = CreateExpressionMap(&expr_file_map);
+
+ bool success{true};
+ {
+ TaskSystem ts;
+ expr_func_map.ConsumeAfterKeysReady(
+ &ts,
+ {id},
+ std::move(value_checker),
+ [&success](std::string const& /*unused*/, bool /*unused*/) {
+ success = false;
+ });
+ }
+ return success;
+}
+
+} // namespace
+
+TEST_CASE("Simple expression object literal", "[expression_map]") {
+ auto name = EntityName{"", ".", "test_expression_literal"};
+ auto consumer = [](auto values) {
+ REQUIRE(*values[0]);
+ auto expr = (*values[0])->Evaluate({}, {});
+
+ REQUIRE(expr);
+ REQUIRE(expr->IsString());
+ CHECK(expr == Expression::FromJson(R"("foo")"_json));
+ };
+
+ SECTION("via file") {
+ CHECK(ReadExpressionFunction(name, consumer, /*use_git=*/false));
+ }
+
+ SECTION("via git tree") {
+ CHECK(ReadExpressionFunction(name, consumer, /*use_git=*/true));
+ }
+}
+
+TEST_CASE("Simple read of variable", "[expression_map]") {
+ auto name = EntityName{"", ".", "test_read_vars"};
+ auto consumer = [](auto values) {
+ REQUIRE(*values[0]);
+ auto expr = (*values[0])
+ ->Evaluate(Configuration{Expression::FromJson(
+ R"({"FOO": "bar"})"_json)},
+ {});
+
+ REQUIRE(expr);
+ REQUIRE(expr->IsString());
+ CHECK(expr == Expression{std::string{"bar"}});
+ };
+
+ SECTION("via file") {
+ CHECK(ReadExpressionFunction(name, consumer, /*use_git=*/false));
+ }
+
+ SECTION("via git tree") {
+ CHECK(ReadExpressionFunction(name, consumer, /*use_git=*/true));
+ }
+}
+
+TEST_CASE("Simple call of imported expression", "[expression_map]") {
+ auto name = EntityName{"", ".", "test_call_import"};
+ auto consumer = [](auto values) {
+ REQUIRE(*values[0]);
+ auto expr = (*values[0])
+ ->Evaluate(Configuration{Expression::FromJson(
+ R"({"FOO": "bar"})"_json)},
+ {});
+
+ REQUIRE(expr);
+ REQUIRE(expr->IsString());
+ CHECK(expr == Expression{std::string{"bar"}});
+ };
+
+ SECTION("via file") {
+ CHECK(ReadExpressionFunction(name, consumer, /*use_git=*/false));
+ }
+
+ SECTION("via git tree") {
+ CHECK(ReadExpressionFunction(name, consumer, /*use_git=*/true));
+ }
+}
+
+TEST_CASE("Overwrite import in nested expression", "[expression_map]") {
+ auto name = EntityName{"", ".", "test_overwrite_import"};
+ auto consumer = [](auto values) {
+ REQUIRE(*values[0]);
+ auto expr = (*values[0])
+ ->Evaluate(Configuration{Expression::FromJson(
+ R"({"FOO": "bar"})"_json)},
+ {});
+
+ REQUIRE(expr);
+ REQUIRE(expr->IsString());
+ CHECK(expr == Expression{std::string{"bar"}});
+ };
+
+ SECTION("via file") {
+ CHECK(ReadExpressionFunction(name, consumer, /*use_git=*/false));
+ }
+
+ SECTION("via git tree") {
+ CHECK(ReadExpressionFunction(name, consumer, /*use_git=*/true));
+ }
+}
+
+TEST_CASE("Fail due to unkown ID", "[expression_map]") {
+ auto name = EntityName{"", ".", "does_not_exist"};
+ auto consumer = [](auto /*values*/) {
+ CHECK(false); // should never be called
+ };
+
+ SECTION("via file") {
+ CHECK_FALSE(ReadExpressionFunction(name, consumer, /*use_git=*/false));
+ }
+
+ SECTION("via git tree") {
+ CHECK_FALSE(ReadExpressionFunction(name, consumer, /*use_git=*/true));
+ }
+}
+
+TEST_CASE("Fail due to missing vars", "[expression_map]") {
+ CHECK(
+ ReadExpressionFunction({"", ".", "test_missing_vars"}, [](auto values) {
+ REQUIRE(*values[0]);
+ auto expr = (*values[0])
+ ->Evaluate(Configuration{Expression::FromJson(
+ R"({"FOO": "bar"})"_json)},
+ {});
+
+ CHECK(expr == Expression::FromJson(R"(null)"_json));
+ }));
+}
+
+TEST_CASE("Fail due to missing imports", "[expression_map]") {
+ CHECK(ReadExpressionFunction(
+ {"", ".", "test_missing_imports"}, [](auto values) {
+ REQUIRE(*values[0]);
+ auto expr = (*values[0])
+ ->Evaluate(Configuration{Expression::FromJson(
+ R"({"FOO": "bar"})"_json)},
+ {});
+
+ CHECK_FALSE(expr);
+ }));
+}
+
+TEST_CASE("Malformed function", "[expression_map]") {
+ CHECK_FALSE(ReadExpressionFunction(
+ {"", ".", "test_malformed_function"}, [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+}
+
+TEST_CASE("Malformed expression", "[expression_map]") {
+ CHECK_FALSE(ReadExpressionFunction(
+ {"", ".", "test_malformed_expression"}, [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+}
+
+TEST_CASE("Malformed vars", "[expression_map]") {
+ CHECK_FALSE(ReadExpressionFunction(
+ {"", ".", "test_malformed_vars"}, [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+}
+
+TEST_CASE("Malformed imports", "[expression_map]") {
+ CHECK_FALSE(ReadExpressionFunction(
+ {"", ".", "test_malformed_imports"}, [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+}
diff --git a/test/buildtool/build_engine/base_maps/json_file_map.test.cpp b/test/buildtool/build_engine/base_maps/json_file_map.test.cpp
new file mode 100644
index 00000000..a2c5baed
--- /dev/null
+++ b/test/buildtool/build_engine/base_maps/json_file_map.test.cpp
@@ -0,0 +1,135 @@
+#include <filesystem>
+#include <utility>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/build_engine/base_maps/json_file_map.hpp"
+#include "src/buildtool/multithreading/task_system.hpp"
+#include "test/buildtool/build_engine/base_maps/test_repo.hpp"
+
+namespace {
+
+using namespace BuildMaps::Base; // NOLINT
+
+void SetupConfig(std::string target_file_name, bool use_git) {
+ auto root = FileRoot{kBasePath};
+ if (use_git) {
+ auto repo_path = CreateTestRepo();
+ REQUIRE(repo_path);
+ auto git_root = FileRoot::FromGit(*repo_path, kJsonTreeId);
+ REQUIRE(git_root);
+ root = std::move(*git_root);
+ }
+ auto info = RepositoryConfig::RepositoryInfo{root};
+ info.target_file_name = std::move(target_file_name);
+ RepositoryConfig::Instance().Reset();
+ RepositoryConfig::Instance().SetInfo("", std::move(info));
+}
+
+template <bool kMandatory = true>
+auto ReadJsonFile(std::string const& target_file_name,
+ ModuleName const& id,
+ JsonFileMap::Consumer value_checker,
+ bool use_git = false,
+ std::optional<JsonFileMap::FailureFunction> fail_func =
+ std::nullopt) -> bool {
+ SetupConfig(target_file_name, use_git);
+ auto json_files = CreateJsonFileMap<&RepositoryConfig::WorkspaceRoot,
+ &RepositoryConfig::TargetFileName,
+ kMandatory>(0);
+ bool success{true};
+ {
+ TaskSystem ts;
+ json_files.ConsumeAfterKeysReady(
+ &ts,
+ {id},
+ std::move(value_checker),
+ [&success](std::string const& /*unused*/, bool /*unused*/) {
+ success = false;
+ },
+ fail_func ? std::move(*fail_func) : [] {});
+ }
+ return success;
+}
+
+} // namespace
+
+TEST_CASE("simple usage") {
+ bool as_expected{false};
+ auto name = ModuleName{"", "data_json"};
+ auto consumer = [&as_expected](auto values) {
+ if ((*values[0])["foo"] == "bar") {
+ as_expected = true;
+ };
+ };
+
+ SECTION("via file") {
+ CHECK(ReadJsonFile("foo.json", name, consumer, /*use_git=*/false));
+ CHECK(as_expected);
+ }
+
+ SECTION("via git tree") {
+ CHECK(ReadJsonFile("foo.json", name, consumer, /*use_git=*/true));
+ CHECK(as_expected);
+ }
+}
+
+TEST_CASE("non existent") {
+ bool as_expected{false};
+ std::atomic<int> failcont_counter{0};
+
+ auto consumer = [&as_expected](auto values) {
+ // Missing optional files are expected to result in empty objects with
+ // no entries in it.
+ if (values[0]->is_object() && values[0]->empty()) {
+ as_expected = true;
+ };
+ };
+ auto fail_func = [&failcont_counter]() { ++failcont_counter; };
+
+ SECTION("optional") {
+ auto name = ModuleName{"", "missing"};
+
+ SECTION("via file") {
+ CHECK(ReadJsonFile<false>(
+ "foo.json", name, consumer, /*use_git=*/false, fail_func));
+ CHECK(as_expected);
+ CHECK(failcont_counter == 0);
+ }
+
+ SECTION("via git tree") {
+ CHECK(ReadJsonFile<false>(
+ "foo.json", name, consumer, /*use_git=*/true, fail_func));
+ CHECK(as_expected);
+ CHECK(failcont_counter == 0);
+ }
+ }
+
+ SECTION("mandatory") {
+ auto name = ModuleName{"", "missing"};
+
+ SECTION("via file") {
+ CHECK_FALSE(ReadJsonFile<true>(
+ "foo.json", name, consumer, /*use_git=*/false, fail_func));
+ CHECK_FALSE(as_expected);
+ CHECK(failcont_counter == 1);
+ }
+
+ SECTION("via git tree") {
+ CHECK_FALSE(ReadJsonFile<true>(
+ "foo.json", name, consumer, /*use_git=*/true, fail_func));
+ CHECK_FALSE(as_expected);
+ CHECK(failcont_counter == 1);
+ }
+ }
+}
+
+TEST_CASE("Bad syntax") {
+ std::atomic<int> failcont_counter{0};
+ CHECK_FALSE(ReadJsonFile(
+ "bad.json",
+ {"", "data_json"},
+ [](auto const& /* unused */) {},
+ /*use_git=*/false,
+ [&failcont_counter]() { failcont_counter++; }));
+ CHECK(failcont_counter == 1);
+}
diff --git a/test/buildtool/build_engine/base_maps/rule_map.test.cpp b/test/buildtool/build_engine/base_maps/rule_map.test.cpp
new file mode 100644
index 00000000..c0e64675
--- /dev/null
+++ b/test/buildtool/build_engine/base_maps/rule_map.test.cpp
@@ -0,0 +1,348 @@
+#include <filesystem>
+#include <functional>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/build_engine/base_maps/expression_map.hpp"
+#include "src/buildtool/build_engine/base_maps/json_file_map.hpp"
+#include "src/buildtool/build_engine/base_maps/rule_map.hpp"
+#include "src/buildtool/multithreading/task_system.hpp"
+#include "test/buildtool/build_engine/base_maps/test_repo.hpp"
+
+namespace {
+
+using namespace BuildMaps::Base; // NOLINT
+
+void SetupConfig(bool use_git) {
+ auto root = FileRoot{kBasePath / "data_rule"};
+ if (use_git) {
+ auto repo_path = CreateTestRepo();
+ REQUIRE(repo_path);
+ auto git_root = FileRoot::FromGit(*repo_path, kRuleTreeId);
+ REQUIRE(git_root);
+ root = std::move(*git_root);
+ }
+ RepositoryConfig::Instance().Reset();
+ RepositoryConfig::Instance().SetInfo(
+ "", RepositoryConfig::RepositoryInfo{root});
+}
+
+auto ReadUserRule(EntityName const& id,
+ UserRuleMap::Consumer value_checker,
+ bool use_git = false) -> bool {
+ SetupConfig(use_git);
+ auto expr_file_map = CreateExpressionFileMap(0);
+ auto expr_func_map = CreateExpressionMap(&expr_file_map);
+ auto rule_file_map = CreateRuleFileMap(0);
+ auto user_rule_map = CreateRuleMap(&rule_file_map, &expr_func_map);
+
+ bool success{true};
+ {
+ TaskSystem ts;
+ user_rule_map.ConsumeAfterKeysReady(
+ &ts,
+ {id},
+ std::move(value_checker),
+ [&success](std::string const& /*unused*/, bool /*unused*/) {
+ success = false;
+ });
+ }
+ return success;
+}
+
+} // namespace
+
+TEST_CASE("Test empty rule", "[expression_map]") {
+ auto name = EntityName{"", ".", "test_empty_rule"};
+ auto consumer = [](auto values) { REQUIRE(values[0]); };
+
+ SECTION("via file") {
+ CHECK(ReadUserRule(name, consumer, /*use_git=*/false));
+ }
+
+ SECTION("via git tree") {
+ CHECK(ReadUserRule(name, consumer, /*use_git=*/true));
+ }
+}
+
+TEST_CASE("Test rule fields", "[rule_map]") {
+ auto name = EntityName{"", ".", "test_rule_fields"};
+ auto consumer = [](auto values) {
+ REQUIRE(*values[0]);
+ REQUIRE_FALSE((*values[0])->StringFields().empty());
+ REQUIRE_FALSE((*values[0])->TargetFields().empty());
+ REQUIRE_FALSE((*values[0])->ConfigFields().empty());
+ CHECK((*values[0])->StringFields().at(0) == "foo");
+ CHECK((*values[0])->TargetFields().at(0) == "bar");
+ CHECK((*values[0])->ConfigFields().at(0) == "baz");
+ };
+
+ SECTION("via file") {
+ CHECK(ReadUserRule(name, consumer, /*use_git=*/false));
+ }
+
+ SECTION("via git tree") {
+ CHECK(ReadUserRule(name, consumer, /*use_git=*/true));
+ }
+}
+
+TEST_CASE("Test config_transitions target", "[rule_map]") {
+ auto consumer = [](auto values) { REQUIRE(*values[0]); };
+
+ SECTION("via field") {
+ auto name =
+ EntityName{"", ".", "test_config_transitions_target_via_field"};
+
+ SECTION("via file") {
+ CHECK(ReadUserRule(name, consumer, /*use_git=*/false));
+ }
+
+ SECTION("via git tree") {
+ CHECK(ReadUserRule(name, consumer, /*use_git=*/true));
+ }
+ }
+ SECTION("via implicit") {
+ auto name =
+ EntityName{"", ".", "test_config_transitions_target_via_implicit"};
+
+ SECTION("via file") {
+ CHECK(ReadUserRule(name, consumer, /*use_git=*/false));
+ }
+
+ SECTION("via git tree") {
+ CHECK(ReadUserRule(name, consumer, /*use_git=*/true));
+ }
+ }
+}
+
+TEST_CASE("Test config_transitions canonicalness", "[rule_map]") {
+ auto name = EntityName{"", ".", "test_config_transitions_canonicalness"};
+ auto consumer = [](auto values) {
+ REQUIRE(*values[0]);
+ auto const& transitions = (*values[0])->ConfigTransitions();
+ REQUIRE(transitions.size() == 4);
+ REQUIRE(transitions.at("foo"));
+ REQUIRE(transitions.at("bar"));
+ REQUIRE(transitions.at("baz"));
+ REQUIRE(transitions.at("qux"));
+ auto foo = transitions.at("foo")->Evaluate({}, {});
+ auto bar = transitions.at("bar")->Evaluate({}, {});
+ auto baz = transitions.at("baz")->Evaluate({}, {});
+ auto qux = transitions.at("qux")->Evaluate({}, {});
+ CHECK(foo == Expression::FromJson(R"([{}])"_json));
+ CHECK(bar == Expression::FromJson(R"([{"exists": true}])"_json));
+ CHECK(baz == Expression::FromJson(R"([{}])"_json));
+ CHECK(qux == Expression::FromJson(R"([{"defined": true}])"_json));
+ };
+
+ SECTION("via file") {
+ CHECK(ReadUserRule(name, consumer, /*use_git=*/false));
+ }
+
+ SECTION("via git tree") {
+ CHECK(ReadUserRule(name, consumer, /*use_git=*/true));
+ }
+}
+
+TEST_CASE("Test call of imported expression", "[rule_map]") {
+ auto name = EntityName{"", ".", "test_call_import"};
+ auto consumer = [](auto values) {
+ REQUIRE(*values[0]);
+ auto expr = (*values[0])->Expression();
+
+ REQUIRE(expr);
+ auto result = expr->Evaluate(
+ Configuration{Expression::FromJson(R"({"FOO": "bar"})"_json)}, {});
+
+ REQUIRE(result);
+ REQUIRE(result->IsMap());
+ CHECK(result["type"] == Expression{std::string{"RESULT"}});
+ CHECK(result["artifacts"] ==
+ Expression::FromJson(R"({"foo": "bar"})"_json));
+ };
+
+ SECTION("via file") {
+ CHECK(ReadUserRule(name, consumer, /*use_git=*/false));
+ }
+
+ SECTION("via git tree") {
+ CHECK(ReadUserRule(name, consumer, /*use_git=*/true));
+ }
+}
+
+TEST_CASE("Fail due to unknown ID", "[rule_map]") {
+ auto name = EntityName{"", ".", "does_not_exist"};
+ auto consumer = [](auto /*values*/) {
+ CHECK(false); // should never be called
+ };
+
+ SECTION("via file") {
+ CHECK_FALSE(ReadUserRule(name, consumer, /*use_git=*/false));
+ }
+
+ SECTION("via git tree") {
+ CHECK_FALSE(ReadUserRule(name, consumer, /*use_git=*/true));
+ }
+}
+
+TEST_CASE("Fail due to conflicting keyword names", "[rule_map]") {
+ SECTION("string_fields") {
+ CHECK_FALSE(ReadUserRule({"", ".", "test_string_kw_conflict"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+ SECTION("target_fields") {
+ CHECK_FALSE(ReadUserRule({"", ".", "test_target_kw_conflict"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+ SECTION("config_fields") {
+ CHECK_FALSE(ReadUserRule({"", ".", "test_config_kw_conflict"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+ SECTION("implicit_fields") {
+ CHECK_FALSE(ReadUserRule({"", ".", "test_implicit_kw_conflict"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+}
+
+TEST_CASE("Fail due to conflicting field names", "[rule_map]") {
+ SECTION("string <-> target") {
+ CHECK_FALSE(ReadUserRule({"", ".", "test_string_target_conflict"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+ SECTION("target <-> config") {
+ CHECK_FALSE(ReadUserRule({"", ".", "test_target_config_conflict"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+ SECTION("config <-> implicit") {
+ CHECK_FALSE(ReadUserRule({"", ".", "test_config_implicit_conflict"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+}
+
+TEST_CASE("Fail due to unknown config_transitions target", "[rule_map]") {
+ CHECK_FALSE(
+ ReadUserRule({"", ".", "test_unknown_config_transitions_target"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+}
+
+TEST_CASE("missing config_vars", "[rule_map]") {
+ CHECK(ReadUserRule({"", ".", "test_missing_config_vars"}, [](auto values) {
+ REQUIRE(*values[0]);
+ auto expr = (*values[0])->Expression();
+
+ REQUIRE(expr);
+ auto result = expr->Evaluate(
+ Configuration{Expression::FromJson(R"({"FOO": "bar"})"_json)}, {});
+
+ CHECK(result["artifacts"]["foo"] ==
+ Expression::FromJson(R"(null)"_json));
+ }));
+}
+
+TEST_CASE("Fail due to missing imports", "[rule_map]") {
+ CHECK(ReadUserRule({"", ".", "test_missing_imports"}, [](auto values) {
+ REQUIRE(*values[0]);
+ auto expr = (*values[0])->Expression();
+
+ REQUIRE(expr);
+ auto result = expr->Evaluate(
+ Configuration{Expression::FromJson(R"({"FOO": "bar"})"_json)}, {});
+
+ CHECK_FALSE(result);
+ }));
+}
+
+TEST_CASE("Malformed rule description", "[rule_map]") {
+ SECTION("Malformed rule") {
+ CHECK_FALSE(
+ ReadUserRule({"", ".", "test_malformed_rule"}, [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+
+ SECTION("Malformed rule expression") {
+ CHECK_FALSE(ReadUserRule({"", ".", "test_malformed_rule_expression"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+
+ SECTION("Malformed target_fields") {
+ CHECK_FALSE(ReadUserRule({"", ".", "test_malformed_target_fields"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+
+ SECTION("Malformed string_fields") {
+ CHECK_FALSE(ReadUserRule({"", ".", "test_malformed_string_fields"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+
+ SECTION("Malformed config_fields") {
+ CHECK_FALSE(ReadUserRule({"", ".", "test_malformed_config_fields"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+
+ SECTION("Malformed implicit") {
+ CHECK_FALSE(ReadUserRule({"", ".", "test_malformed_implicit"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+
+ SECTION("Malformed implicit entry") {
+ CHECK_FALSE(ReadUserRule({"", ".", "test_malformed_implicit_entry"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+
+ SECTION("Malformed implicit entity name") {
+ CHECK_FALSE(
+ ReadUserRule({"", ".", "test_malformed_implicit_entity_name"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+
+ SECTION("Malformed config_vars") {
+ CHECK_FALSE(ReadUserRule({"", ".", "test_malformed_config_vars"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+
+ SECTION("Malformed config_transitions") {
+ CHECK_FALSE(ReadUserRule({"", ".", "test_malformed_config_transitions"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+
+ SECTION("Malformed imports") {
+ CHECK_FALSE(ReadUserRule({"", ".", "test_malformed_imports"},
+ [](auto /*values*/) {
+ CHECK(false); // should never be called
+ }));
+ }
+}
diff --git a/test/buildtool/build_engine/base_maps/source_map.test.cpp b/test/buildtool/build_engine/base_maps/source_map.test.cpp
new file mode 100644
index 00000000..a9ffba98
--- /dev/null
+++ b/test/buildtool/build_engine/base_maps/source_map.test.cpp
@@ -0,0 +1,144 @@
+#include <filesystem>
+#include <utility>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/build_engine/base_maps/directory_map.hpp"
+#include "src/buildtool/build_engine/base_maps/entity_name.hpp"
+#include "src/buildtool/build_engine/base_maps/source_map.hpp"
+#include "src/buildtool/multithreading/async_map_consumer.hpp"
+#include "src/buildtool/multithreading/task_system.hpp"
+#include "test/buildtool/build_engine/base_maps/test_repo.hpp"
+
+namespace {
+
+using namespace BuildMaps::Base; // NOLINT
+
+void SetupConfig(bool use_git) {
+ auto root = FileRoot{kBasePath / "data_src"};
+ if (use_git) {
+ auto repo_path = CreateTestRepo();
+ REQUIRE(repo_path);
+ auto git_root = FileRoot::FromGit(*repo_path, kSrcTreeId);
+ REQUIRE(git_root);
+ root = std::move(*git_root);
+ }
+ RepositoryConfig::Instance().Reset();
+ RepositoryConfig::Instance().SetInfo(
+ "", RepositoryConfig::RepositoryInfo{root});
+}
+
+auto ReadSourceTarget(
+ EntityName const& id,
+ SourceTargetMap::Consumer consumer,
+ bool use_git = false,
+ std::optional<SourceTargetMap::FailureFunction> fail_func = std::nullopt)
+ -> bool {
+ SetupConfig(use_git);
+ auto directory_entries = CreateDirectoryEntriesMap();
+ auto source_artifacts = CreateSourceTargetMap(&directory_entries);
+ std::string error_msg;
+ bool success{true};
+ {
+ TaskSystem ts;
+ source_artifacts.ConsumeAfterKeysReady(
+ &ts,
+ {id},
+ std::move(consumer),
+ [&success, &error_msg](std::string const& msg, bool /*unused*/) {
+ success = false;
+ error_msg = msg;
+ },
+ fail_func ? std::move(*fail_func) : [] {});
+ }
+ return success and error_msg.empty();
+}
+
+} // namespace
+
+TEST_CASE("from file") {
+ nlohmann::json artifacts;
+ auto name = EntityName{"", ".", "file"};
+ auto consumer = [&artifacts](auto values) {
+ artifacts = (*values[0])->Artifacts()->ToJson();
+ };
+
+ SECTION("via file") {
+ CHECK(ReadSourceTarget(name, consumer, /*use_git=*/false));
+ CHECK(artifacts["file"]["type"] == "LOCAL");
+ CHECK(artifacts["file"]["data"]["path"] == "file");
+ }
+
+ SECTION("via git tree") {
+ CHECK(ReadSourceTarget(name, consumer, /*use_git=*/true));
+ CHECK(artifacts["file"]["type"] == "KNOWN");
+ CHECK(artifacts["file"]["data"]["id"] ==
+ "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391");
+ CHECK(artifacts["file"]["data"]["size"] == 0);
+ }
+}
+
+TEST_CASE("not present at all") {
+ bool consumed{false};
+ bool failure_called{false};
+ auto name = EntityName{"", ".", "does_not_exist"};
+ auto consumer = [&consumed](auto /*unused*/) { consumed = true; };
+ auto fail_func = [&failure_called]() { failure_called = true; };
+
+ SECTION("via file") {
+ CHECK_FALSE(
+ ReadSourceTarget(name, consumer, /*use_git=*/false, fail_func));
+ CHECK_FALSE(consumed);
+ CHECK(failure_called);
+ }
+
+ SECTION("via git tree") {
+ CHECK_FALSE(
+ ReadSourceTarget(name, consumer, /*use_git=*/true, fail_func));
+ CHECK_FALSE(consumed);
+ CHECK(failure_called);
+ }
+}
+
+TEST_CASE("malformed entry") {
+ bool consumed{false};
+ bool failure_called{false};
+ auto name = EntityName{"", ".", "bad_entry"};
+ auto consumer = [&consumed](auto /*unused*/) { consumed = true; };
+ auto fail_func = [&failure_called]() { failure_called = true; };
+
+ SECTION("via git tree") {
+ CHECK_FALSE(
+ ReadSourceTarget(name, consumer, /*use_git=*/false, fail_func));
+ CHECK_FALSE(consumed);
+ CHECK(failure_called);
+ }
+
+ SECTION("via git tree") {
+ CHECK_FALSE(
+ ReadSourceTarget(name, consumer, /*use_git=*/true, fail_func));
+ CHECK_FALSE(consumed);
+ CHECK(failure_called);
+ }
+}
+
+TEST_CASE("subdir file") {
+ nlohmann::json artifacts;
+ auto name = EntityName{"", "foo", "bar/file"};
+ auto consumer = [&artifacts](auto values) {
+ artifacts = (*values[0])->Artifacts()->ToJson();
+ };
+
+ SECTION("via file") {
+ CHECK(ReadSourceTarget(name, consumer, /*use_git=*/false));
+ CHECK(artifacts["bar/file"]["type"] == "LOCAL");
+ CHECK(artifacts["bar/file"]["data"]["path"] == "foo/bar/file");
+ }
+
+ SECTION("via git tree") {
+ CHECK(ReadSourceTarget(name, consumer, /*use_git=*/true));
+ CHECK(artifacts["bar/file"]["type"] == "KNOWN");
+ CHECK(artifacts["bar/file"]["data"]["id"] ==
+ "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391");
+ CHECK(artifacts["bar/file"]["data"]["size"] == 0);
+ }
+}
diff --git a/test/buildtool/build_engine/base_maps/test_repo.hpp b/test/buildtool/build_engine/base_maps/test_repo.hpp
new file mode 100644
index 00000000..1269d2da
--- /dev/null
+++ b/test/buildtool/build_engine/base_maps/test_repo.hpp
@@ -0,0 +1,41 @@
+#ifndef INCLUDED_SRC_TEST_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_TEST_REPO_HPP
+#define INCLUDED_SRC_TEST_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_TEST_REPO_HPP
+
+#include <filesystem>
+
+#include "src/buildtool/common/repository_config.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+
+static auto const kBasePath =
+ std::filesystem::path{"test/buildtool/build_engine/base_maps"};
+static auto const kBundlePath = kBasePath / "data/test_repo.bundle";
+static auto const kSrcTreeId =
+ std::string{"a35c324c6cf79354f6fd8a3c962f9ce7db801915"};
+static auto const kRuleTreeId =
+ std::string{"c6dd902c9d4e7afa8b20eb04e58503e63ecab84d"};
+static auto const kExprTreeId =
+ std::string{"4946bd21d0a5b3e0c82d6944f3d47adaf1bb66f7"};
+static auto const kJsonTreeId =
+ std::string{"6982563dfc4dcdd1362792dbbc9d8243968d1ec9"};
+
+[[nodiscard]] static inline auto GetTestDir() -> std::filesystem::path {
+ auto* tmp_dir = std::getenv("TEST_TMPDIR");
+ if (tmp_dir != nullptr) {
+ return tmp_dir;
+ }
+ return FileSystemManager::GetCurrentDirectory() / kBasePath;
+}
+
+[[nodiscard]] static inline auto CreateTestRepo()
+ -> std::optional<std::filesystem::path> {
+ auto repo_path = GetTestDir() / "test_repo" /
+ std::filesystem::path{std::tmpnam(nullptr)}.filename();
+ auto cmd = fmt::format(
+ "git clone --bare {} {}", kBundlePath.string(), repo_path.string());
+ if (std::system(cmd.c_str()) == 0) {
+ return repo_path;
+ }
+ return std::nullopt;
+}
+
+#endif // INCLUDED_SRC_TEST_BUILDTOOL_BUILD_ENGINE_BASE_MAPS_TEST_REPO_HPP
diff --git a/test/buildtool/build_engine/expression/TARGETS b/test/buildtool/build_engine/expression/TARGETS
new file mode 100644
index 00000000..83c560fa
--- /dev/null
+++ b/test/buildtool/build_engine/expression/TARGETS
@@ -0,0 +1,42 @@
+{ "linked_map":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["linked_map"]
+ , "srcs": ["linked_map.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["test/utils", "container_matchers"]
+ , ["src/buildtool/build_engine/expression", "linked_map"]
+ ]
+ , "stage": ["test", "buildtool", "build_engine", "expression"]
+ }
+, "expression":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["expression"]
+ , "srcs": ["expression.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["test/utils", "container_matchers"]
+ , ["src/buildtool/build_engine/expression", "expression"]
+ ]
+ , "stage": ["test", "buildtool", "build_engine", "expression"]
+ }
+, "configuration":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["configuration"]
+ , "srcs": ["configuration.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["test/utils", "container_matchers"]
+ , ["src/buildtool/build_engine/expression", "expression"]
+ ]
+ , "stage": ["test", "buildtool", "build_engine", "expression"]
+ }
+, "TESTS":
+ { "type": "install"
+ , "tainted": ["test"]
+ , "deps": ["configuration", "expression", "linked_map"]
+ }
+} \ No newline at end of file
diff --git a/test/buildtool/build_engine/expression/configuration.test.cpp b/test/buildtool/build_engine/expression/configuration.test.cpp
new file mode 100644
index 00000000..95145038
--- /dev/null
+++ b/test/buildtool/build_engine/expression/configuration.test.cpp
@@ -0,0 +1,107 @@
+#include <vector>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/build_engine/expression/configuration.hpp"
+#include "test/utils/container_matchers.hpp"
+
+TEST_CASE("Access", "[configuration]") {
+ auto env =
+ Configuration{Expression::FromJson(R"({"foo": 1, "bar": 2})"_json)};
+
+ CHECK(env["foo"] == Expression::FromJson("1"_json));
+ CHECK(env[Expression::FromJson(R"("bar")"_json)] ==
+ Expression::FromJson("2"_json));
+
+ CHECK(env["baz"] == Expression::FromJson(R"(null)"_json));
+ CHECK(env[Expression::FromJson(R"("baz")"_json)] ==
+ Expression::FromJson(R"(null)"_json));
+}
+
+TEST_CASE("Update", "[configuration]") {
+ SECTION("Append") {
+ auto env = Configuration{Expression::FromJson(R"({})"_json)};
+ env = env.Update(Expression::FromJson(R"({"foo": 1})"_json));
+ CHECK(env["foo"] == Expression::FromJson("1"_json));
+
+ env = env.Update("bar", Expression::number_t{2});
+ CHECK(env["bar"] == Expression::FromJson("2"_json));
+
+ env = env.Update(Expression::map_t::underlying_map_t{
+ {"baz", ExpressionPtr{Expression::number_t{3}}}});
+ CHECK(env["baz"] == Expression::FromJson("3"_json));
+ }
+
+ SECTION("Overwrite") {
+ auto env = Configuration{
+ Expression::FromJson(R"({"foo": 1, "bar": 2, "baz" : 3})"_json)};
+ CHECK(env["foo"] == Expression::FromJson("1"_json));
+ CHECK(env["bar"] == Expression::FromJson("2"_json));
+ CHECK(env["baz"] == Expression::FromJson("3"_json));
+
+ env = env.Update(Expression::FromJson(R"({"foo": 10})"_json));
+ CHECK(env["foo"] == Expression::FromJson("10"_json));
+ CHECK(env["bar"] == Expression::FromJson("2"_json));
+ CHECK(env["baz"] == Expression::FromJson("3"_json));
+
+ env = env.Update("bar", Expression::number_t{20}); // NOLINT
+ CHECK(env["foo"] == Expression::FromJson("10"_json));
+ CHECK(env["bar"] == Expression::FromJson("20"_json));
+ CHECK(env["baz"] == Expression::FromJson("3"_json));
+
+ env = env.Update(Expression::map_t::underlying_map_t{
+ {"baz", ExpressionPtr{Expression::number_t{30}}}}); // NOLINT
+ CHECK(env["foo"] == Expression::FromJson("10"_json));
+ CHECK(env["bar"] == Expression::FromJson("20"_json));
+ CHECK(env["baz"] == Expression::FromJson("30"_json));
+ }
+}
+
+TEST_CASE("Prune", "[configuration]") {
+ auto env =
+ Configuration{Expression::FromJson(R"({"foo": 1, "bar": 2})"_json)};
+ CHECK(env["foo"] == Expression::FromJson("1"_json));
+ CHECK(env["bar"] == Expression::FromJson("2"_json));
+
+ SECTION("Via string list") {
+ env = env.Prune(std::vector<std::string>{"foo", "bar", "baz"});
+ CHECK(env["foo"] == Expression::FromJson("1"_json));
+ CHECK(env["bar"] == Expression::FromJson("2"_json));
+
+ env = env.Prune(std::vector<std::string>{"foo", "bar"});
+ CHECK(env["foo"] == Expression::FromJson("1"_json));
+ CHECK(env["bar"] == Expression::FromJson("2"_json));
+
+ env = env.Prune(std::vector<std::string>{"foo"});
+ CHECK(env["foo"] == Expression::FromJson("1"_json));
+ CHECK(env["bar"] == Expression::FromJson(R"(null)"_json));
+
+ env = env.Prune(std::vector<std::string>{});
+ CHECK(env["foo"] == Expression::FromJson(R"(null)"_json));
+ CHECK(env["bar"] == Expression::FromJson(R"(null)"_json));
+ }
+
+ SECTION("Via expression") {
+ env = env.Prune(Expression::FromJson(R"(["foo", "bar", "baz"])"_json));
+ CHECK(env["foo"] == Expression::FromJson("1"_json));
+ CHECK(env["bar"] == Expression::FromJson("2"_json));
+
+ env = env.Prune(Expression::FromJson(R"(["foo", "bar"])"_json));
+ CHECK(env["foo"] == Expression::FromJson("1"_json));
+ CHECK(env["bar"] == Expression::FromJson("2"_json));
+
+ env = env.Prune(Expression::FromJson(R"(["foo"])"_json));
+ CHECK(env["foo"] == Expression::FromJson("1"_json));
+ CHECK(env["bar"] == Expression::FromJson(R"(null)"_json));
+
+ env = env.Prune(Expression::FromJson(R"([])"_json));
+ CHECK(env["foo"] == Expression::FromJson(R"(null)"_json));
+ CHECK(env["bar"] == Expression::FromJson(R"(null)"_json));
+
+ CHECK_THROWS_AS(env.Prune(Expression::FromJson(
+ R"(["not_all_string", false])"_json)),
+ Expression::ExpressionTypeError);
+
+ CHECK_THROWS_AS(env.Prune(Expression::FromJson(R"("not_a_list")"_json)),
+ Expression::ExpressionTypeError);
+ }
+}
diff --git a/test/buildtool/build_engine/expression/expression.test.cpp b/test/buildtool/build_engine/expression/expression.test.cpp
new file mode 100644
index 00000000..180fcbe0
--- /dev/null
+++ b/test/buildtool/build_engine/expression/expression.test.cpp
@@ -0,0 +1,1401 @@
+#include "catch2/catch.hpp"
+#include "src/buildtool/build_engine/expression/configuration.hpp"
+#include "src/buildtool/build_engine/expression/expression.hpp"
+#include "src/buildtool/build_engine/expression/function_map.hpp"
+#include "test/utils/container_matchers.hpp"
+
+TEST_CASE("Expression access", "[expression]") { // NOLINT
+ using namespace std::string_literals;
+ using path = std::filesystem::path;
+ using none_t = Expression::none_t;
+ using number_t = Expression::number_t;
+ using artifact_t = Expression::artifact_t;
+ using result_t = Expression::result_t;
+ using list_t = Expression::list_t;
+ using map_t = Expression::map_t;
+
+ auto none = ExpressionPtr{};
+ auto boolean = ExpressionPtr{true};
+ auto number = ExpressionPtr{number_t{1}};
+ auto string = ExpressionPtr{"2"s};
+ auto artifact = ExpressionPtr{artifact_t{path{"local_path"}}};
+ auto result = ExpressionPtr{result_t{boolean, number, string}};
+ auto list = ExpressionPtr{list_t{number}};
+ auto map = ExpressionPtr{map_t{{"3"s, number}}};
+
+ SECTION("Type checks") {
+ CHECK(none->IsNone());
+
+ CHECK(boolean->IsBool());
+ CHECK_FALSE(boolean->IsNone());
+
+ CHECK(number->IsNumber());
+ CHECK_FALSE(number->IsNone());
+
+ CHECK(string->IsString());
+ CHECK_FALSE(string->IsNone());
+
+ CHECK(artifact->IsArtifact());
+ CHECK_FALSE(artifact->IsNone());
+
+ CHECK(result->IsResult());
+ CHECK_FALSE(result->IsNone());
+
+ CHECK(list->IsList());
+ CHECK_FALSE(list->IsNone());
+
+ CHECK(map->IsMap());
+ CHECK_FALSE(map->IsNone());
+ }
+
+ SECTION("Throwing accessors") {
+ CHECK(boolean->Bool() == true);
+ CHECK_THROWS_AS(boolean->Number(), Expression::ExpressionTypeError);
+
+ CHECK(number->Number() == number_t{1});
+ CHECK_THROWS_AS(number->Bool(), Expression::ExpressionTypeError);
+
+ CHECK(string->String() == "2"s);
+ CHECK_THROWS_AS(string->Artifact(), Expression::ExpressionTypeError);
+
+ CHECK(artifact->Artifact() == artifact_t{path{"local_path"}});
+ CHECK_THROWS_AS(artifact->String(), Expression::ExpressionTypeError);
+
+ CHECK(result->Result() == result_t{boolean, number, string});
+ CHECK_THROWS_AS(result->String(), Expression::ExpressionTypeError);
+
+ CHECK_THAT(list->List(), Catch::Equals<ExpressionPtr>({number}));
+ CHECK_THROWS_AS(list->Map(), Expression::ExpressionTypeError);
+
+ REQUIRE(map->Map().at("3"s) == number);
+ CHECK_THROWS_AS(map->List(), Expression::ExpressionTypeError);
+ }
+
+ SECTION("Non-throwing accessors") {
+ CHECK(none->Value<none_t>());
+
+ CHECK(boolean->Value<bool>());
+ CHECK_FALSE(boolean->Value<none_t>());
+
+ CHECK(number->Value<number_t>());
+ CHECK_FALSE(number->Value<none_t>());
+
+ CHECK(string->Value<std::string>());
+ CHECK_FALSE(string->Value<none_t>());
+
+ CHECK(artifact->Value<artifact_t>());
+ CHECK_FALSE(artifact->Value<none_t>());
+
+ CHECK(result->Value<result_t>());
+ CHECK_FALSE(result->Value<none_t>());
+
+ CHECK(list->Value<list_t>());
+ CHECK_FALSE(list->Value<none_t>());
+
+ CHECK(map->Value<map_t>());
+ CHECK_FALSE(map->Value<none_t>());
+ }
+
+ SECTION("Non-throwing comparison operator") {
+ CHECK(none == none);
+ CHECK(none == Expression{});
+ CHECK(none == Expression::FromJson("null"_json));
+ CHECK(none != Expression{false});
+ CHECK(none != Expression{number_t{0}});
+ CHECK(none != Expression{""s});
+ CHECK(none != Expression{"0"s});
+ CHECK(none != Expression{list_t{}});
+ CHECK(none != Expression{map_t{}});
+
+ CHECK(boolean == boolean);
+ CHECK(boolean == true);
+ CHECK(boolean == Expression{true});
+ CHECK(boolean == Expression::FromJson("true"_json));
+ CHECK(boolean != false);
+ CHECK(boolean != Expression{false});
+ CHECK(boolean != number_t{1});
+ CHECK(boolean != number);
+ CHECK(boolean != Expression::FromJson("false"_json));
+
+ CHECK(number == number);
+ CHECK(number == number_t{1});
+ CHECK(number == Expression{number_t{1}});
+ CHECK(number == Expression::FromJson("1"_json));
+ CHECK(number != number_t{});
+ CHECK(number != Expression{number_t{}});
+ CHECK(number != true);
+ CHECK(number != boolean);
+ CHECK(number != Expression::FromJson("0"_json));
+
+ CHECK(string == string);
+ CHECK(string == "2"s);
+ CHECK(string == Expression{"2"s});
+ CHECK(string == Expression::FromJson(R"("2")"_json));
+ CHECK(string != ""s);
+ CHECK(string != Expression{""s});
+ CHECK(string != artifact_t{path{"local_path"}});
+ CHECK(string != artifact);
+ CHECK(string != Expression::FromJson(R"("")"_json));
+
+ CHECK(artifact == artifact);
+ CHECK(artifact == artifact_t{path{"local_path"}});
+ CHECK(artifact == Expression{artifact_t{path{"local_path"}}});
+ CHECK(artifact != ""s);
+ CHECK(artifact != string);
+
+ CHECK(result == result);
+ CHECK(result == result_t{boolean, number, string});
+ CHECK(result == Expression{result_t{boolean, number, string}});
+ CHECK(result != ""s);
+ CHECK(result != string);
+
+ CHECK(list == list);
+ CHECK(list == list_t{number});
+ CHECK(list == Expression{list_t{number}});
+ CHECK(list == Expression::FromJson("[1]"_json));
+ CHECK(list != list_t{});
+ CHECK(list != Expression{list_t{}});
+ CHECK(list != map);
+ CHECK(list != Expression{*map});
+ CHECK(list != Expression::FromJson(R"({"1":1})"_json));
+
+ CHECK(map == map);
+ CHECK(map == map_t{{"3"s, number}});
+ CHECK(map == Expression{map_t{{"3"s, number}}});
+ CHECK(map == Expression::FromJson(R"({"3":1})"_json));
+ CHECK(map != map_t{});
+ CHECK(map != Expression{map_t{}});
+ CHECK(map != list);
+ CHECK(map != Expression{*list});
+ CHECK(map != Expression::FromJson(R"(["3",1])"_json));
+
+ // compare nullptr != null != false != 0 != "" != [] != {}
+ auto exprs =
+ std::vector<ExpressionPtr>{ExpressionPtr{nullptr},
+ ExpressionPtr{artifact_t{path{""}}},
+ ExpressionPtr{result_t{}},
+ Expression::FromJson("null"_json),
+ Expression::FromJson("false"_json),
+ Expression::FromJson("0"_json),
+ Expression::FromJson(R"("")"_json),
+ Expression::FromJson("[]"_json),
+ Expression::FromJson("{}"_json)};
+ for (auto const& l : exprs) {
+ for (auto const& r : exprs) {
+ if (&l != &r) {
+ CHECK(l != r);
+ }
+ }
+ }
+ }
+
+ SECTION("Throwing access operator") {
+ // operators with argument of type size_t expect list
+ CHECK_THROWS_AS(none[0], Expression::ExpressionTypeError);
+ CHECK_THROWS_AS(boolean[0], Expression::ExpressionTypeError);
+ CHECK_THROWS_AS(number[0], Expression::ExpressionTypeError);
+ CHECK_THROWS_AS(string[0], Expression::ExpressionTypeError);
+ CHECK_THROWS_AS(artifact[0], Expression::ExpressionTypeError);
+ CHECK_THROWS_AS(result[0], Expression::ExpressionTypeError);
+ CHECK(list[0] == number);
+ CHECK_THROWS_AS(map[0], Expression::ExpressionTypeError);
+
+ // operators with argument of type std::string expect map
+ CHECK_THROWS_AS(none["3"], Expression::ExpressionTypeError);
+ CHECK_THROWS_AS(boolean["3"], Expression::ExpressionTypeError);
+ CHECK_THROWS_AS(number["3"], Expression::ExpressionTypeError);
+ CHECK_THROWS_AS(string["3"], Expression::ExpressionTypeError);
+ CHECK_THROWS_AS(artifact["3"], Expression::ExpressionTypeError);
+ CHECK_THROWS_AS(result["3"], Expression::ExpressionTypeError);
+ CHECK_THROWS_AS(list["3"], Expression::ExpressionTypeError);
+ CHECK(map["3"] == number);
+ }
+}
+
+TEST_CASE("Expression from JSON", "[expression]") {
+ auto none = Expression::FromJson("null"_json);
+ REQUIRE(none);
+ CHECK(none->IsNone());
+
+ auto boolean = Expression::FromJson("true"_json);
+ REQUIRE(boolean);
+ REQUIRE(boolean->IsBool());
+ CHECK(boolean->Bool() == true);
+
+ auto number = Expression::FromJson("1"_json);
+ REQUIRE(number);
+ REQUIRE(number->IsNumber());
+ CHECK(number->Number() == 1);
+
+ auto string = Expression::FromJson(R"("foo")"_json);
+ REQUIRE(string);
+ REQUIRE(string->IsString());
+ CHECK(string->String() == "foo");
+
+ auto list = Expression::FromJson("[]"_json);
+ REQUIRE(list);
+ REQUIRE(list->IsList());
+ CHECK(list->List().empty());
+
+ auto map = Expression::FromJson("{}"_json);
+ REQUIRE(map);
+ REQUIRE(map->IsMap());
+ CHECK(map->Map().empty());
+}
+
+namespace {
+auto TestToJson(nlohmann::json const& json) -> void {
+ auto expr = Expression::FromJson(json);
+ REQUIRE(expr);
+ CHECK(expr->ToJson() == json);
+}
+} // namespace
+
+TEST_CASE("Expression to JSON", "[expression]") {
+ TestToJson("null"_json);
+ TestToJson("true"_json);
+ TestToJson("1"_json);
+ TestToJson(R"("foo")"_json);
+ TestToJson("[]"_json);
+ TestToJson("{}"_json);
+}
+
+namespace {
+template <class T>
+concept ValidExpressionTypeOrPtr =
+ Expression::IsValidType<T>() or std::is_same_v<T, ExpressionPtr>;
+
+template <ValidExpressionTypeOrPtr T>
+auto Add(ExpressionPtr const& expr, std::string const& key, T const& by)
+ -> ExpressionPtr {
+ try {
+ auto new_map = Expression::map_t::underlying_map_t{};
+ new_map.emplace(key, by);
+ return ExpressionPtr{Expression::map_t{expr, new_map}};
+ } catch (...) {
+ return ExpressionPtr{nullptr};
+ }
+}
+
+template <ValidExpressionTypeOrPtr T>
+auto Replace(ExpressionPtr const& expr, std::string const& key, T const& by)
+ -> ExpressionPtr {
+ auto const& map = expr->Map();
+ if (not map.contains(key)) {
+ return ExpressionPtr{nullptr};
+ }
+ return Add(expr, key, by);
+}
+} // namespace
+
+TEST_CASE("Expression Evaluation", "[expression]") { // NOLINT
+ using namespace std::string_literals;
+ using number_t = Expression::number_t;
+ using list_t = Expression::list_t;
+
+ auto env = Configuration{};
+ auto fcts = FunctionMapPtr{};
+
+ auto foo = ExpressionPtr{"foo"s};
+ auto bar = ExpressionPtr{"bar"s};
+ auto baz = ExpressionPtr{"baz"s};
+
+ SECTION("list object") {
+ auto expr = Expression::FromJson(R"(["foo", "bar", "baz"])"_json);
+ REQUIRE(expr);
+ REQUIRE(expr->IsList());
+ CHECK(expr->List().size() == 3);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsList());
+ CHECK(result->List().size() == 3);
+ CHECK(*result == *expr);
+ }
+
+ SECTION("map object without type") {
+ auto expr = Expression::FromJson(R"({"foo": "bar"})"_json);
+ REQUIRE(expr);
+ auto result = expr.Evaluate(env, fcts);
+ CHECK_FALSE(result);
+ }
+
+ fcts = FunctionMap::MakePtr(
+ "literal", [](auto&& /*eval*/, auto const& expr, auto const& /*env*/) {
+ return expr->Get("$1", Expression::none_t{});
+ });
+
+ SECTION("custom function") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "literal"
+ , "$1": "PLACEHOLDER" })"_json);
+ REQUIRE(expr);
+
+ auto literal = Expression::FromJson(R"({"foo": "bar"})"_json);
+ REQUIRE(literal);
+
+ expr = Replace(expr, "$1", literal);
+ REQUIRE(expr);
+
+ auto result = expr.Evaluate(env, fcts);
+ CHECK(result);
+ CHECK(*result == *literal);
+ }
+
+ SECTION("var expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "var"
+ , "name": "foo" })"_json);
+ REQUIRE(expr);
+
+ auto none_result = expr.Evaluate(env, fcts);
+ CHECK(none_result == Expression::FromJson(R"(null)"_json));
+
+ env = env.Update("foo", "bar"s);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsString());
+ CHECK(result == Expression::FromJson(R"("bar")"_json));
+
+ auto overwrite = expr.Evaluate(env.Update("foo", list_t{result}), fcts);
+ REQUIRE(overwrite);
+ REQUIRE(overwrite->IsList());
+ CHECK(overwrite == Expression::FromJson(R"(["bar"])"_json));
+ }
+
+ SECTION("if expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "if"
+ , "cond": "PLACEHOLDER"
+ , "then": "success"
+ , "else": "failure" })"_json);
+ REQUIRE(expr);
+
+ SECTION("Boolean condition") {
+ expr = Replace(expr, "cond", true);
+ REQUIRE(expr);
+ auto success = expr.Evaluate(env, fcts);
+ REQUIRE(success);
+ REQUIRE(success->IsString());
+ CHECK(success == Expression::FromJson(R"("success")"_json));
+
+ expr = Replace(expr, "cond", false);
+ REQUIRE(expr);
+ auto failure = expr.Evaluate(env, fcts);
+ REQUIRE(failure);
+ REQUIRE(failure->IsString());
+ CHECK(failure == Expression::FromJson(R"("failure")"_json));
+ }
+
+ SECTION("Number condition") {
+ expr = Replace(expr, "cond", number_t{1});
+ REQUIRE(expr);
+ auto success = expr.Evaluate(env, fcts);
+ REQUIRE(success);
+ REQUIRE(success->IsString());
+ CHECK(success == Expression::FromJson(R"("success")"_json));
+
+ expr = Replace(expr, "cond", number_t{0});
+ REQUIRE(expr);
+ auto failure = expr.Evaluate(env, fcts);
+ REQUIRE(failure);
+ REQUIRE(failure->IsString());
+ CHECK(failure == Expression::FromJson(R"("failure")"_json));
+ }
+
+ SECTION("String condition") {
+ expr = Replace(expr, "cond", "false"s);
+ REQUIRE(expr);
+ auto success = expr.Evaluate(env, fcts);
+ REQUIRE(success);
+ REQUIRE(success->IsString());
+ CHECK(success == Expression::FromJson(R"("success")"_json));
+
+ expr = Replace(expr, "cond", ""s);
+ REQUIRE(expr);
+ auto fail1 = expr.Evaluate(env, fcts);
+ REQUIRE(fail1);
+ REQUIRE(fail1->IsString());
+ CHECK(fail1 == Expression::FromJson(R"("failure")"_json));
+
+ expr = Replace(expr, "cond", "0"s);
+ REQUIRE(expr);
+ auto fail2 = expr.Evaluate(env, fcts);
+ REQUIRE(fail2);
+ REQUIRE(fail2->IsString());
+ CHECK(fail2 == Expression::FromJson(R"("failure")"_json));
+
+ expr = Replace(expr, "cond", "NO"s);
+ REQUIRE(expr);
+ auto fail3 = expr.Evaluate(env, fcts);
+ REQUIRE(fail3);
+ REQUIRE(fail3->IsString());
+ CHECK(fail3 == Expression::FromJson(R"("failure")"_json));
+ }
+
+ SECTION("List condition") {
+ expr = Replace(expr, "cond", list_t{ExpressionPtr{}});
+ REQUIRE(expr);
+ auto success = expr.Evaluate(env, fcts);
+ REQUIRE(success);
+ REQUIRE(success->IsString());
+ CHECK(success == Expression::FromJson(R"("success")"_json));
+
+ expr = Replace(expr, "cond", list_t{});
+ REQUIRE(expr);
+ auto failure = expr.Evaluate(env, fcts);
+ REQUIRE(failure);
+ REQUIRE(failure->IsString());
+ CHECK(failure == Expression::FromJson(R"("failure")"_json));
+ }
+
+ SECTION("Map condition") {
+ auto literal = Expression::FromJson(
+ R"({"type": "literal", "$1": {"foo": "bar"}})"_json);
+ REQUIRE(literal);
+ expr = Replace(expr, "cond", literal);
+ REQUIRE(expr);
+ auto success = expr.Evaluate(env, fcts);
+ REQUIRE(success);
+ REQUIRE(success->IsString());
+ CHECK(success == Expression::FromJson(R"("success")"_json));
+
+ auto empty =
+ Expression::FromJson(R"({"type": "literal", "$1": {}})"_json);
+ REQUIRE(empty);
+ expr = Replace(expr, "cond", empty);
+ REQUIRE(expr);
+ auto failure = expr.Evaluate(env, fcts);
+ REQUIRE(failure);
+ REQUIRE(failure->IsString());
+ CHECK(failure == Expression::FromJson(R"("failure")"_json));
+ }
+ }
+ SECTION("cond expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "cond"
+ , "cond":
+ [ [ { "type": "=="
+ , "$1": {"type":"var", "name": "val", "default": ""}
+ , "$2": 0
+ }
+ , "number"
+ ]
+ , [ { "type": "=="
+ , "$1": {"type":"var", "name": "val", "default": ""}
+ , "$2": "0"
+ }
+ , "string"
+ ]
+ , [ { "type": "=="
+ , "$1": {"type":"var", "name": "val", "default": ""}
+ , "$2": false
+ }
+ , "boolean"
+ ]
+ , [ {"type":"var", "name": "val", "default": ""}, "first" ]
+ , [ {"type":"var", "name": "val", "default": ""}, "second" ]
+ ]})"_json);
+ REQUIRE(expr);
+
+ auto number = expr.Evaluate(env.Update("val", 0.0), fcts);
+ REQUIRE(number);
+ REQUIRE(number->IsString());
+ CHECK(number == Expression::FromJson(R"("number")"_json));
+
+ auto string = expr.Evaluate(env.Update("val", "0"s), fcts);
+ REQUIRE(string);
+ REQUIRE(string->IsString());
+ CHECK(string == Expression::FromJson(R"("string")"_json));
+
+ auto boolean = expr.Evaluate(env.Update("val", false), fcts);
+ REQUIRE(boolean);
+ REQUIRE(boolean->IsString());
+ CHECK(boolean == Expression::FromJson(R"("boolean")"_json));
+
+ auto first = expr.Evaluate(env.Update("val", true), fcts);
+ REQUIRE(first);
+ REQUIRE(first->IsString());
+ CHECK(first == Expression::FromJson(R"("first")"_json));
+
+ auto default1 = expr.Evaluate(env, fcts);
+ REQUIRE(default1);
+ REQUIRE(default1->IsList());
+ CHECK(default1 == Expression::FromJson(R"([])"_json));
+
+ expr = Add(expr, "default", "default"s);
+ auto default2 = expr.Evaluate(env, fcts);
+ REQUIRE(default2);
+ REQUIRE(default2->IsString());
+ CHECK(default2 == Expression::FromJson(R"("default")"_json));
+ }
+
+ SECTION("case expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "case"
+ , "expr": {"type": "var", "name": "val", "default": ""}
+ , "case":
+ { "foo": "FOO"
+ , "bar": {"type": "var", "name": "bar", "default": "BAR"}
+ }
+ })"_json);
+ REQUIRE(expr);
+
+ auto foo = expr.Evaluate(env.Update("val", "foo"s), fcts);
+ REQUIRE(foo);
+ REQUIRE(foo->IsString());
+ CHECK(foo == Expression::FromJson(R"("FOO")"_json));
+
+ auto bar = expr.Evaluate(env.Update("val", "bar"s), fcts);
+ REQUIRE(bar);
+ REQUIRE(bar->IsString());
+ CHECK(bar == Expression::FromJson(R"("BAR")"_json));
+
+ auto default1 = expr.Evaluate(env, fcts);
+ REQUIRE(default1);
+ REQUIRE(default1->IsList());
+ CHECK(default1 == Expression::FromJson(R"([])"_json));
+
+ expr = Add(expr, "default", "default"s);
+ auto default2 = expr.Evaluate(env, fcts);
+ REQUIRE(default2);
+ REQUIRE(default2->IsString());
+ CHECK(default2 == Expression::FromJson(R"("default")"_json));
+ }
+
+ SECTION("case* expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "case*"
+ , "expr": {"type": "var", "name": "val"}
+ , "case":
+ [ [false, "FOO"]
+ , [ {"type": "var", "name": "bar", "default": null}
+ , {"type": "var", "name": "bar", "default": "BAR"}
+ ]
+ , [0, {"type": "join", "$1": ["B", "A", "Z"]}]
+ ]
+ })"_json);
+ REQUIRE(expr);
+
+ auto foo = expr.Evaluate(env.Update("val", false), fcts);
+ REQUIRE(foo);
+ REQUIRE(foo->IsString());
+ CHECK(foo == Expression::FromJson(R"("FOO")"_json));
+
+ auto bar = expr.Evaluate(env, fcts);
+ REQUIRE(bar);
+ REQUIRE(bar->IsString());
+ CHECK(bar == Expression::FromJson(R"("BAR")"_json));
+
+ auto baz = expr.Evaluate(env.Update("val", 0.0), fcts);
+ REQUIRE(baz);
+ REQUIRE(baz->IsString());
+ CHECK(baz == Expression::FromJson(R"("BAZ")"_json));
+
+ auto default1 = expr.Evaluate(env.Update("val", ""s), fcts);
+ REQUIRE(default1);
+ REQUIRE(default1->IsList());
+ CHECK(default1 == Expression::FromJson(R"([])"_json));
+
+ expr = Add(expr, "default", "default"s);
+ auto default2 = expr.Evaluate(env.Update("val", ""s), fcts);
+ REQUIRE(default2);
+ REQUIRE(default2->IsString());
+ CHECK(default2 == Expression::FromJson(R"("default")"_json));
+ }
+
+ SECTION("== expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "=="
+ , "$1": "foo"
+ , "$2": "PLACEHOLDER"})"_json);
+ REQUIRE(expr);
+
+ expr = Replace(expr, "$2", "foo"s);
+ REQUIRE(expr);
+ auto success = expr.Evaluate(env, fcts);
+ REQUIRE(success);
+ REQUIRE(success->IsBool());
+ CHECK(success == Expression::FromJson("true"_json));
+
+ expr = Replace(expr, "$2", "bar"s);
+ REQUIRE(expr);
+ auto failure = expr.Evaluate(env, fcts);
+ REQUIRE(failure);
+ REQUIRE(failure->IsBool());
+ CHECK(failure == Expression::FromJson("false"_json));
+ }
+
+ SECTION("and expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "and"
+ , "$1": "PLACEHOLDER" })"_json);
+ REQUIRE(expr);
+
+ auto empty = ExpressionPtr{""s};
+
+ expr = Replace(expr, "$1", list_t{foo, bar});
+ REQUIRE(expr);
+ auto success = expr.Evaluate(env, fcts);
+ REQUIRE(success);
+ REQUIRE(success->IsBool());
+ CHECK(success == Expression::FromJson("true"_json));
+
+ expr = Replace(expr, "$1", list_t{foo, empty});
+ REQUIRE(expr);
+ auto failure = expr.Evaluate(env, fcts);
+ REQUIRE(failure);
+ REQUIRE(failure->IsBool());
+ CHECK(failure == Expression::FromJson("false"_json));
+
+ // test evaluation of list elements
+ expr = Replace(expr, "$1", list_t{foo, Expression::FromJson(R"(
+ {"type": "literal"
+ , "$1": true})"_json)});
+ REQUIRE(expr);
+ auto evaluated = expr.Evaluate(env, fcts);
+ REQUIRE(evaluated);
+ REQUIRE(evaluated->IsBool());
+ CHECK(evaluated == Expression::FromJson("true"_json));
+
+ // test short-circuit evaluation of logical and (static list)
+ auto static_list =
+ R"([true, false, {"type": "fail", "msg": "failed"}])"_json;
+ expr = Replace(expr, "$1", Expression::FromJson(static_list));
+ REQUIRE(expr);
+ auto static_result = expr.Evaluate(env, fcts);
+ REQUIRE(static_result);
+ REQUIRE(static_result->IsBool());
+ CHECK(static_result == Expression::FromJson("false"_json));
+
+ // test full evaluation of dynamic list (expression evaluating to list)
+ auto dynamic_list =
+ nlohmann::json{{"type", "context"}, {"$1", static_list}};
+ expr = Replace(expr, "$1", Expression::FromJson(dynamic_list));
+ REQUIRE(expr);
+ auto dyn_result = expr.Evaluate(env, fcts);
+ REQUIRE_FALSE(dyn_result);
+ }
+
+ SECTION("or expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "or"
+ , "$1": "PLACEHOLDER" })"_json);
+ REQUIRE(expr);
+
+ auto empty = ExpressionPtr{""s};
+
+ expr = Replace(expr, "$1", list_t{foo, bar});
+ REQUIRE(expr);
+ auto success = expr.Evaluate(env, fcts);
+ REQUIRE(success);
+ REQUIRE(success->IsBool());
+ CHECK(success == Expression::FromJson("true"_json));
+
+ expr = Replace(expr, "$1", list_t{foo, empty});
+ REQUIRE(expr);
+ auto failure = expr.Evaluate(env, fcts);
+ REQUIRE(failure);
+ REQUIRE(failure->IsBool());
+ CHECK(failure == Expression::FromJson("true"_json));
+
+ // test evaluation of list elements
+ expr = Replace(expr, "$1", list_t{foo, Expression::FromJson(R"(
+ {"type": "literal"
+ , "$1": true})"_json)});
+ REQUIRE(expr);
+ auto evaluated = expr.Evaluate(env, fcts);
+ REQUIRE(evaluated);
+ REQUIRE(evaluated->IsBool());
+ CHECK(evaluated == Expression::FromJson("true"_json));
+
+ // test short-circuit evaluation of logical or (static list)
+ auto static_list =
+ R"([false, true, {"type": "fail", "msg": "failed"}])"_json;
+ expr = Replace(expr, "$1", Expression::FromJson(static_list));
+ REQUIRE(expr);
+ auto static_result = expr.Evaluate(env, fcts);
+ REQUIRE(static_result);
+ REQUIRE(static_result->IsBool());
+ CHECK(static_result == Expression::FromJson("true"_json));
+
+ // test full evaluation of dynamic list (expression evaluating to list)
+ auto dynamic_list =
+ nlohmann::json{{"type", "context"}, {"$1", static_list}};
+ expr = Replace(expr, "$1", Expression::FromJson(dynamic_list));
+ REQUIRE(expr);
+ auto dyn_result = expr.Evaluate(env, fcts);
+ REQUIRE_FALSE(dyn_result);
+ }
+
+ SECTION("+ expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "+"
+ , "$1": ["foo"]
+ , "$2": "PLACEHOLDER" })"_json);
+ REQUIRE(expr);
+
+ expr = Replace(expr, "$2", list_t{bar});
+ REQUIRE(expr);
+ auto success = expr.Evaluate(env, fcts);
+ REQUIRE(success);
+ REQUIRE(success->IsList());
+ CHECK(success == Expression::FromJson(R"(["foo", "bar"])"_json));
+
+ expr = Replace(expr, "$2", bar);
+ REQUIRE(expr);
+ CHECK_FALSE(expr.Evaluate(env, fcts));
+ }
+
+ SECTION("++ expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "++"
+ , "$1": [ ["foo"]
+ , ["bar", "baz"]]})"_json);
+ REQUIRE(expr);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsList());
+ CHECK(result == Expression::FromJson(R"(["foo", "bar", "baz"])"_json));
+ }
+
+ SECTION("nub_right expression") {
+ auto expr = Expression::FromJson(R"(
+ {"type": "nub_right"
+ , "$1": ["-lfoo", "-lbar", "-lbaz", "-lbar"]
+ })"_json);
+ REQUIRE(expr);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsList());
+ CHECK(result ==
+ Expression::FromJson(R"(["-lfoo", "-lbaz", "-lbar"])"_json));
+ }
+
+ SECTION("nub_right expression 2") {
+ auto expr = Expression::FromJson(R"(
+ {"type": "nub_right"
+ , "$1":
+ { "type": "++"
+ , "$1":
+ [ ["libg.a"]
+ , ["libe.a", "libd.a", "libc.a", "liba.a", "libb.a"]
+ , ["libf.a", "libc.a", "libd.a", "libb.a", "liba.a"]
+ , ["libc.a", "liba.a", "libb.a"]
+ , ["libd.a", "libb.a", "liba.a"]
+ ]
+ }
+ })"_json);
+ REQUIRE(expr);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsList());
+ CHECK(result == Expression::FromJson(R"(
+ ["libg.a", "libe.a", "libf.a", "libc.a", "libd.a", "libb.a", "liba.a"]
+ )"_json));
+ }
+
+ SECTION("change_ending") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "change_ending"
+ , "$1": "PLACEHOLDER"
+ , "ending": "_suffix" })"_json);
+ REQUIRE(expr);
+
+ expr = Replace(expr, "$1", ""s);
+ REQUIRE(expr);
+ auto empty_path = expr.Evaluate(env, fcts);
+ REQUIRE(empty_path);
+ REQUIRE(empty_path->IsString());
+ CHECK(empty_path == Expression::FromJson(R"("_suffix")"_json));
+
+ expr = Replace(expr, "$1", ".rc"s);
+ REQUIRE(expr);
+ auto hidden_file = expr.Evaluate(env, fcts);
+ REQUIRE(hidden_file);
+ REQUIRE(hidden_file->IsString());
+ CHECK(hidden_file == Expression::FromJson(R"(".rc_suffix")"_json));
+
+ expr = Replace(expr, "$1", "/root/path/file.txt"s);
+ REQUIRE(expr);
+ auto full_path = expr.Evaluate(env, fcts);
+ REQUIRE(full_path);
+ REQUIRE(full_path->IsString());
+ CHECK(full_path ==
+ Expression::FromJson(R"("/root/path/file_suffix")"_json));
+ }
+
+ SECTION("basename") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "basename"
+ , "$1": "PLACEHOLDER"
+ })"_json);
+ REQUIRE(expr);
+
+ expr = Replace(expr, "$1", "foo.c"s);
+ REQUIRE(expr);
+ auto plain_file = expr.Evaluate(env, fcts);
+ REQUIRE(plain_file);
+ REQUIRE(plain_file->IsString());
+ CHECK(plain_file == Expression::FromJson(R"("foo.c")"_json));
+
+ expr = Replace(expr, "$1", "/path/to/file.txt"s);
+ REQUIRE(expr);
+ auto stripped_path = expr.Evaluate(env, fcts);
+ REQUIRE(stripped_path);
+ REQUIRE(stripped_path->IsString());
+ CHECK(stripped_path == Expression::FromJson(R"("file.txt")"_json));
+ }
+
+ SECTION("join") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "join"
+ , "$1": "PLACEHOLDER"
+ , "separator": ";" })"_json);
+ REQUIRE(expr);
+
+ expr = Replace(expr, "$1", list_t{});
+ REQUIRE(expr);
+ auto empty = expr.Evaluate(env, fcts);
+ REQUIRE(empty);
+ REQUIRE(empty->IsString());
+ CHECK(empty == Expression::FromJson(R"("")"_json));
+
+ expr = Replace(expr, "$1", list_t{foo});
+ REQUIRE(expr);
+ auto single = expr.Evaluate(env, fcts);
+ REQUIRE(single);
+ REQUIRE(single->IsString());
+ CHECK(single == Expression::FromJson(R"("foo")"_json));
+
+ expr = Replace(expr, "$1", list_t{foo, bar, baz});
+ REQUIRE(expr);
+ auto multi = expr.Evaluate(env, fcts);
+ REQUIRE(multi);
+ REQUIRE(multi->IsString());
+ CHECK(multi == Expression::FromJson(R"("foo;bar;baz")"_json));
+
+ expr = Replace(expr, "$1", foo);
+ REQUIRE(expr);
+ auto string = expr.Evaluate(env, fcts);
+ REQUIRE(string);
+ REQUIRE(string->IsString());
+ CHECK(string == Expression::FromJson(R"("foo")"_json));
+
+ // only list of strings or string is allowed
+ expr = Replace(expr, "$1", list_t{foo, ExpressionPtr{number_t{}}});
+ REQUIRE(expr);
+ CHECK_FALSE(expr.Evaluate(env, fcts));
+
+ expr = Replace(expr, "$1", number_t{});
+ REQUIRE(expr);
+ CHECK_FALSE(expr.Evaluate(env, fcts));
+ }
+
+ SECTION("join_cmd expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "join_cmd"
+ , "$1": ["foo", "bar's", "baz"]})"_json);
+ REQUIRE(expr);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsString());
+ CHECK(result ==
+ Expression::FromJson(R"("'foo' 'bar'\\''s' 'baz'")"_json));
+ }
+
+ SECTION("escape_chars expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "escape_chars"
+ , "$1": "escape me X"
+ , "chars": "abcX"
+ , "escape_prefix": "X"})"_json);
+ REQUIRE(expr);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsString());
+ CHECK(result == Expression::FromJson(R"("esXcXape me XX")"_json));
+ }
+
+ SECTION("keys expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "keys"
+ , "$1": { "type": "literal"
+ , "$1": { "foo": true
+ , "bar": false
+ , "baz": true }}})"_json);
+ REQUIRE(expr);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsList());
+ CHECK(result == Expression::FromJson(R"(["bar", "baz", "foo"])"_json));
+ }
+
+ SECTION("values expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "values"
+ , "$1": { "type": "literal"
+ , "$1": { "foo": true
+ , "bar": "foo"
+ , "baz": 1 }}})"_json);
+ REQUIRE(expr);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsList());
+ CHECK(result == Expression::FromJson(R"(["foo", 1, true])"_json));
+ }
+
+ SECTION("lookup expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "lookup"
+ , "key": "PLACEHOLDER"
+ , "map": { "type": "literal"
+ , "$1": { "foo": true
+ , "bar": 1 }}})"_json);
+ REQUIRE(expr);
+
+ expr = Replace(expr, "key", "foo"s);
+ REQUIRE(expr);
+ auto result_foo = expr.Evaluate(env, fcts);
+ REQUIRE(result_foo);
+ CHECK(result_foo == Expression::FromJson("true"_json));
+
+ expr = Replace(expr, "key", "bar"s);
+ REQUIRE(expr);
+ auto result_bar = expr.Evaluate(env, fcts);
+ REQUIRE(result_bar);
+ CHECK(result_bar == Expression::FromJson("1"_json));
+
+ // key baz is missing
+ expr = Replace(expr, "key", "baz"s);
+ REQUIRE(expr);
+ auto result_baz = expr.Evaluate(env, fcts);
+ REQUIRE(result_baz);
+ CHECK(result_baz == Expression::FromJson("null"_json));
+
+ // map is not mapping
+ expr = Replace(expr, "map", list_t{});
+ REQUIRE(expr);
+ CHECK_FALSE(expr.Evaluate(env, fcts));
+
+ // key is not string
+ expr = Replace(expr, "key", number_t{});
+ REQUIRE(expr);
+ CHECK_FALSE(expr.Evaluate(env, fcts));
+ }
+
+ SECTION("lookup with default") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "lookup"
+ , "key": "PLACEHOLDER"
+ , "map": { "type": "literal"
+ , "$1": { "foo": false
+ , "bar": 1
+ , "baz" : null}}
+ , "default" : { "type" : "join"
+ , "separator": "x"
+ , "$1": ["a", "b"]}})"_json);
+ REQUIRE(expr);
+
+ // Key present (and false)
+ expr = Replace(expr, "key", "foo"s);
+ REQUIRE(expr);
+ auto result_foo = expr.Evaluate(env, fcts);
+ REQUIRE(result_foo);
+ CHECK(result_foo == Expression::FromJson("false"_json));
+
+ // Key present but value is null
+ expr = Replace(expr, "key", "baz"s);
+ REQUIRE(expr);
+ auto result_baz = expr.Evaluate(env, fcts);
+ REQUIRE(result_baz);
+ CHECK(result_baz == Expression::FromJson(R"("axb")"_json));
+
+ // Key not present
+ expr = Replace(expr, "key", "missing"s);
+ REQUIRE(expr);
+ auto result_missing = expr.Evaluate(env, fcts);
+ REQUIRE(result_missing);
+ CHECK(result_missing == Expression::FromJson(R"("axb")"_json));
+ }
+
+ SECTION("empty_map expression") {
+ auto expr = Expression::FromJson(R"({"type": "empty_map"})"_json);
+ REQUIRE(expr);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsMap());
+ CHECK(result == Expression::FromJson("{}"_json));
+ }
+
+ SECTION("singleton_map expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "singleton_map"
+ , "key": "foo"
+ , "value": "bar"})"_json);
+ REQUIRE(expr);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsMap());
+ CHECK(result == Expression::FromJson(R"({"foo": "bar"})"_json));
+ }
+
+ SECTION("disjoint_map_union expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "disjoint_map_union"
+ , "$1": "PLACEHOLDER" })"_json);
+ REQUIRE(expr);
+
+ auto literal_foo = Expression::FromJson(
+ R"({"type": "literal", "$1": {"foo":true}})"_json);
+ REQUIRE(literal_foo);
+ auto literal_foo_false = Expression::FromJson(
+ R"({"type": "literal", "$1": {"foo":false}})"_json);
+ REQUIRE(literal_foo_false);
+ auto literal_bar = Expression::FromJson(
+ R"({"type": "literal", "$1": {"bar":false}})"_json);
+ REQUIRE(literal_bar);
+
+ expr = Replace(expr, "$1", list_t{literal_foo, literal_bar});
+ REQUIRE(expr);
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsMap());
+ CHECK(result ==
+ Expression::FromJson(R"({"foo": true, "bar": false})"_json));
+
+ // duplicate foo, but with same value
+ expr = Replace(expr, "$1", list_t{literal_foo, literal_foo});
+ REQUIRE(expr);
+ result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsMap());
+ CHECK(result == Expression::FromJson(R"({"foo": true})"_json));
+
+ // duplicate foo, but with different value
+ expr = Replace(expr, "$1", list_t{literal_foo, literal_foo_false});
+ REQUIRE(expr);
+ CHECK_FALSE(expr.Evaluate(env, fcts));
+
+ // empty list should produce empty map
+ expr = Replace(expr, "$1", list_t{});
+ REQUIRE(expr);
+ auto empty = expr.Evaluate(env, fcts);
+ REQUIRE(empty);
+ REQUIRE(empty->IsMap());
+ REQUIRE(empty == Expression::FromJson("{}"_json));
+ }
+
+ SECTION("map_union expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "map_union"
+ , "$1": { "type": "literal"
+ , "$1": [ {"foo": true}
+ , {"bar": false}] }})"_json);
+ REQUIRE(expr);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsMap());
+ CHECK(result ==
+ Expression::FromJson(R"({"foo": true, "bar": false})"_json));
+
+ // empty list should produce empty map
+ expr = Expression::FromJson(R"({"type": "map_union", "$1": []})"_json);
+ REQUIRE(expr);
+ auto empty = expr.Evaluate(env, fcts);
+ REQUIRE(empty);
+ REQUIRE(empty->IsMap());
+ REQUIRE(empty == Expression::FromJson("{}"_json));
+ }
+
+ SECTION("to_subdir expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "to_subdir"
+ , "subdir": "prefix"
+ , "$1": { "type": "literal"
+ , "$1": { "foo": "hello"
+ , "bar": "world" }}})"_json);
+ REQUIRE(expr);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsMap());
+ CHECK(result ==
+ Expression::FromJson(
+ R"({"prefix/foo": "hello", "prefix/bar": "world"})"_json));
+ }
+
+ SECTION("flat to_subdir without proper conflict") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "to_subdir"
+ , "subdir": "prefix"
+ , "flat" : "YES"
+ , "$1": { "type": "literal"
+ , "$1": { "foobar/data/foo": "hello"
+ , "foobar/include/foo": "hello"
+ , "bar": "world" }}})"_json);
+ REQUIRE(expr);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsMap());
+ CHECK(result ==
+ Expression::FromJson(
+ R"({"prefix/foo": "hello", "prefix/bar": "world"})"_json));
+ }
+
+ SECTION("flat to_subdir with conflict") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "to_subdir"
+ , "subdir": "prefix"
+ , "flat" : "YES"
+ , "$1": { "type": "literal"
+ , "$1": { "foobar/data/foo": "HELLO"
+ , "foobar/include/foo": "hello"
+ , "bar": "world" }}})"_json);
+ REQUIRE(expr);
+
+ CHECK_FALSE(expr.Evaluate(env, fcts));
+ }
+
+ fcts = FunctionMap::MakePtr(
+ fcts, "concat", [](auto&& eval, auto const& expr, auto const& env) {
+ auto p1 = eval(expr->Get("$1", ""s), env);
+ auto p2 = eval(expr->Get("$2", ""s), env);
+ return ExpressionPtr{p1->String() + p2->String()};
+ });
+
+ SECTION("foreach expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "foreach"
+ , "var": "x"
+ , "range": ["foo", "bar", "baz"]
+ , "body": { "type": "concat"
+ , "$1": { "type": "var"
+ , "name": "x" }
+ , "$2": "y" }})"_json);
+ REQUIRE(expr);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsList());
+ CHECK(result ==
+ Expression::FromJson(R"(["fooy", "bary", "bazy"])"_json));
+ }
+
+ SECTION("foreach_map expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "foreach_map"
+ , "var_key": "key"
+ , "var_val": "val"
+ , "body": { "type": "concat"
+ , "$1": { "type": "var"
+ , "name": "key" }
+ , "$2": { "type": "var"
+ , "name": "val" }}})"_json);
+ REQUIRE(expr);
+
+ // range is missing (should default to empty map)
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsList());
+ CHECK(result == Expression::FromJson(R"([])"_json));
+
+ // range is map with one entry
+ expr = Add(expr, "range", Expression::FromJson(R"(
+ { "type": "literal"
+ , "$1": {"foo": "bar"}})"_json));
+ REQUIRE(expr);
+
+ result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsList());
+ CHECK(result == Expression::FromJson(R"(["foobar"])"_json));
+
+ // range is map with multiple entries
+ expr = Replace(expr, "range", Expression::FromJson(R"(
+ { "type": "literal"
+ , "$1": {"foo": "bar", "bar": "baz"}})"_json));
+ REQUIRE(expr);
+
+ result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsList());
+ CHECK(result == Expression::FromJson(R"(["barbaz", "foobar"])"_json));
+
+ // fail if range is string
+ expr = Replace(expr, "range", Expression::FromJson(R"("foo")"_json));
+ REQUIRE(expr);
+ CHECK_FALSE(expr.Evaluate(env, fcts));
+
+ // fail if range is number
+ expr = Replace(expr, "range", Expression::FromJson(R"("4711")"_json));
+ REQUIRE(expr);
+ CHECK_FALSE(expr.Evaluate(env, fcts));
+
+ // fail if range is Boolean
+ expr = Replace(expr, "range", Expression::FromJson(R"("true")"_json));
+ REQUIRE(expr);
+ CHECK_FALSE(expr.Evaluate(env, fcts));
+ }
+
+ SECTION("foldl expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "foldl"
+ , "var": "x"
+ , "range": ["bar", "baz"]
+ , "accum_var": "a"
+ , "start": "foo"
+ , "body": { "type": "concat"
+ , "$1": { "type": "var"
+ , "name": "x" }
+ , "$2": { "type": "var"
+ , "name": "a" }}})"_json);
+ REQUIRE(expr);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsString());
+ CHECK(result == Expression::FromJson(R"("bazbarfoo")"_json));
+ }
+
+ SECTION("let* expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "let*"
+ , "bindings": [ ["foo", "foo"]
+ , ["bar", "bar"] ]
+ , "body": { "type": "concat"
+ , "$1": { "type": "var"
+ , "name": "foo" }
+ , "$2": { "type": "var"
+ , "name": "bar" }}})"_json);
+ REQUIRE(expr);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsString());
+ CHECK(result == Expression::FromJson(R"("foobar")"_json));
+ }
+
+ SECTION("sequentiallity of let* expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "let*"
+ , "bindings":
+ [ ["one", "foo"]
+ , ["two", { "type": "join"
+ , "$1": [ {"type": "var", "name" : "one"}
+ , {"type": "var", "name" : "one"} ]}]
+ , ["four", { "type": "join"
+ , "$1": [ {"type": "var", "name" : "two"}
+ , {"type": "var", "name" : "two"} ]}]
+ ]
+ , "body": { "type" : "var"
+ , "name" : "four" }
+ })"_json);
+ REQUIRE(expr);
+
+ auto result = expr.Evaluate(env, fcts);
+ REQUIRE(result);
+ REQUIRE(result->IsString());
+ CHECK(result == Expression::FromJson(R"("foofoofoofoo")"_json));
+ }
+
+ SECTION("concat_target_name expression") {
+ auto expr = Expression::FromJson(R"(
+ { "type": "concat_target_name"
+ , "$1": "PLACEHOLDER"
+ , "$2": "_suffix" })"_json);
+ REQUIRE(expr);
+
+ expr = Replace(expr, "$1", "foo"s);
+ REQUIRE(expr);
+ auto str_result = expr.Evaluate(env, fcts);
+ REQUIRE(str_result);
+ REQUIRE(str_result->IsString());
+ CHECK(str_result == Expression::FromJson(R"("foo_suffix")"_json));
+
+ auto dep_tgt = Expression::FromJson(R"(["subdir", "bar"])"_json);
+ REQUIRE(dep_tgt);
+ expr = Replace(expr, "$1", dep_tgt);
+ REQUIRE(expr);
+ auto dep_result = expr.Evaluate(env, fcts);
+ REQUIRE(dep_result);
+ REQUIRE(dep_result->IsList());
+ CHECK(dep_result ==
+ Expression::FromJson(R"(["subdir", "bar_suffix"])"_json));
+ }
+}
+
+TEST_CASE("Expression hash computation", "[expression]") {
+ using namespace std::string_literals;
+ using path = std::filesystem::path;
+ using number_t = Expression::number_t;
+ using artifact_t = Expression::artifact_t;
+ using result_t = Expression::result_t;
+ using list_t = Expression::list_t;
+ using map_t = Expression::map_t;
+
+ auto none = ExpressionPtr{};
+ auto boolean = ExpressionPtr{false};
+ auto number = ExpressionPtr{number_t{}};
+ auto string = ExpressionPtr{""s};
+ auto artifact = ExpressionPtr{artifact_t{path{""}}};
+ auto result = ExpressionPtr{result_t{}};
+ auto list = ExpressionPtr{list_t{}};
+ auto map = ExpressionPtr{map_t{}};
+
+ CHECK_FALSE(none->ToHash().empty());
+ CHECK(none->ToHash() == Expression{}.ToHash());
+
+ CHECK_FALSE(boolean->ToHash().empty());
+ CHECK(boolean->ToHash() == Expression{false}.ToHash());
+ CHECK_FALSE(boolean->ToHash() == Expression{true}.ToHash());
+
+ CHECK_FALSE(number->ToHash().empty());
+ CHECK(number->ToHash() == Expression{number_t{}}.ToHash());
+ CHECK_FALSE(number->ToHash() == Expression{number_t{1}}.ToHash());
+
+ CHECK_FALSE(string->ToHash().empty());
+ CHECK(string->ToHash() == Expression{""s}.ToHash());
+ CHECK_FALSE(string->ToHash() == Expression{" "s}.ToHash());
+
+ CHECK_FALSE(artifact->ToHash().empty());
+ CHECK(artifact->ToHash() == Expression{artifact_t{path{""}}}.ToHash());
+ CHECK_FALSE(artifact->ToHash() ==
+ Expression{artifact_t{path{" "}}}.ToHash());
+
+ CHECK_FALSE(result->ToHash().empty());
+ CHECK(result->ToHash() == Expression{result_t{}}.ToHash());
+ CHECK_FALSE(result->ToHash() == Expression{result_t{boolean}}.ToHash());
+
+ CHECK_FALSE(list->ToHash().empty());
+ CHECK(list->ToHash() == Expression{list_t{}}.ToHash());
+ CHECK_FALSE(list->ToHash() == Expression{list_t{number}}.ToHash());
+ CHECK_FALSE(list->ToHash() == Expression{map_t{{""s, number}}}.ToHash());
+
+ CHECK_FALSE(map->ToHash().empty());
+ CHECK(map->ToHash() == Expression{map_t{}}.ToHash());
+ CHECK_FALSE(map->ToHash() == Expression{map_t{{""s, number}}}.ToHash());
+ CHECK_FALSE(map->ToHash() == Expression{list_t{string, number}}.ToHash());
+
+ auto exprs = std::vector<ExpressionPtr>{
+ none, boolean, number, string, artifact, result, list, map};
+ for (auto const& l : exprs) {
+ for (auto const& r : exprs) {
+ if (&l != &r) {
+ CHECK_FALSE(l->ToHash() == r->ToHash());
+ }
+ }
+ }
+}
diff --git a/test/buildtool/build_engine/expression/linked_map.test.cpp b/test/buildtool/build_engine/expression/linked_map.test.cpp
new file mode 100644
index 00000000..b4546f0f
--- /dev/null
+++ b/test/buildtool/build_engine/expression/linked_map.test.cpp
@@ -0,0 +1,252 @@
+#include <algorithm>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/build_engine/expression/linked_map.hpp"
+#include "test/utils/container_matchers.hpp"
+
+TEST_CASE("Empty map", "[linked_map]") {
+ using map_t = LinkedMap<std::string, int>;
+
+ auto map = map_t::MakePtr(map_t::underlying_map_t{});
+ REQUIRE(map);
+ CHECK(map->empty());
+
+ auto empty_map = map_t::underlying_map_t{};
+ map = map_t::MakePtr(map, empty_map);
+ REQUIRE(map);
+ CHECK(map->empty());
+
+ auto empty_linked_map = map_t::MakePtr(empty_map);
+ map = map_t::MakePtr(map, empty_linked_map);
+ REQUIRE(map);
+ CHECK(map->empty());
+}
+
+TEST_CASE("Lookup and iteration", "[linked_map]") {
+ using map_t = LinkedMap<std::string, int>;
+ constexpr int kCount{100};
+ constexpr int kQ{10}; // kQ == gcd(kCount, kQ) && 0 < kCount / kQ < 10
+
+ auto map = map_t::MakePtr("0", 0);
+ REQUIRE(map);
+ CHECK(not map->empty());
+ CHECK(map->size() == 1);
+
+ for (int i{1}; i < kCount; ++i) {
+ auto update = map_t::underlying_map_t{{std::to_string(i / kQ), i}};
+ if (i % 2 == 0) { // update via underlying map
+ map = map_t::MakePtr(map, update);
+ }
+ else { // update via linked map ptr
+ map = map_t::MakePtr(map, map_t::MakePtr(update));
+ }
+ REQUIRE(map);
+ CHECK(map->size() == static_cast<std::size_t>((i / kQ) + 1));
+ }
+
+ SECTION("contains and lookup") {
+ for (int i{0}; i < kCount / kQ; ++i) {
+ auto key = std::to_string(i);
+ // kQ-many values per key: i -> i*kQ + [0;kQ-1], expect last
+ auto expect = i * kQ + (kQ - 1);
+ CHECK(map->contains(key));
+ CHECK(map->at(key) == expect);
+ }
+ }
+
+ SECTION("iteration via ranged-based loop") {
+ auto i = kQ - 1;
+ for (auto const& el : *map) {
+ CHECK(el.first == std::to_string(i / kQ));
+ CHECK(el.second == i);
+ i += kQ;
+ }
+ }
+
+ SECTION("iteration via algorithm") {
+ auto i = kQ - 1;
+ std::for_each(std::begin(*map), std::end(*map), [&](auto const& el) {
+ CHECK(el.first == std::to_string(i / kQ));
+ CHECK(el.second == i);
+ i += kQ;
+ });
+ }
+}
+
+class CopyCounter {
+ public:
+ CopyCounter() : count_{std::make_shared<size_t>()} {}
+ CopyCounter(CopyCounter const& other) {
+ ++(*other.count_);
+ count_ = other.count_;
+ }
+ CopyCounter(CopyCounter&&) = default;
+ ~CopyCounter() = default;
+ auto operator=(CopyCounter const& other) -> CopyCounter& {
+ if (this != &other) {
+ ++(*other.count_);
+ count_ = other.count_;
+ }
+ return *this;
+ }
+ auto operator=(CopyCounter &&) -> CopyCounter& = default;
+ [[nodiscard]] auto Count() const -> std::size_t { return *count_; }
+
+ private:
+ // all copies of this object share the same counter
+ std::shared_ptr<size_t> count_{};
+};
+
+TEST_CASE("Zero copies", "[linked_map]") {
+ using map_t = LinkedMap<std::string, CopyCounter>;
+ constexpr int kCount{100};
+
+ auto map = map_t::Ptr{};
+
+ SECTION("Via initializer list") {
+ for (int i{0}; i < kCount; ++i) {
+ map = map_t::MakePtr(map, {{std::to_string(i), CopyCounter{}}});
+ REQUIRE(map);
+ }
+
+ for (int i{0}; i < kCount; ++i) {
+ auto key = std::to_string(i);
+ REQUIRE(map->contains(key));
+ // underlaying map's initializer_list produces a single copy
+ CHECK(map->at(key).Count() == 1);
+ }
+ }
+
+ SECTION("Via pair") {
+ for (int i{0}; i < kCount; ++i) {
+ map = map_t::MakePtr(map, {std::to_string(i), CopyCounter{}});
+ REQUIRE(map);
+ }
+
+ for (int i{0}; i < kCount; ++i) {
+ auto key = std::to_string(i);
+ REQUIRE(map->contains(key));
+ CHECK(map->at(key).Count() == 0);
+ }
+ }
+
+ SECTION("Via key and value arguments") {
+ for (int i{0}; i < kCount; ++i) {
+ map = map_t::MakePtr(map, std::to_string(i), CopyCounter{});
+ REQUIRE(map);
+ }
+
+ for (int i{0}; i < kCount; ++i) {
+ auto key = std::to_string(i);
+ REQUIRE(map->contains(key));
+ CHECK(map->at(key).Count() == 0);
+ }
+ }
+
+ SECTION("Via underlaying map and emplace") {
+ for (int i{0}; i < kCount; ++i) {
+ map_t::underlying_map_t update{};
+ update.emplace(std::to_string(i), CopyCounter());
+ map = map_t::MakePtr(map, std::move(update));
+ REQUIRE(map);
+ }
+
+ for (int i{0}; i < kCount; ++i) {
+ auto key = std::to_string(i);
+ REQUIRE(map->contains(key));
+ CHECK(map->at(key).Count() == 0);
+ }
+ }
+
+ SECTION("Via linked map ptr") {
+ for (int i{0}; i < kCount; ++i) {
+ auto update = map_t::MakePtr(std::to_string(i), CopyCounter{});
+ map = map_t::MakePtr(map, std::move(update));
+ REQUIRE(map);
+ }
+
+ for (int i{0}; i < kCount; ++i) {
+ auto key = std::to_string(i);
+ REQUIRE(map->contains(key));
+ CHECK(map->at(key).Count() == 0);
+ }
+ }
+}
+
+// Custom container that holds a LinkedMap.
+class CustomContainer {
+ public:
+ class Ptr;
+ using linked_map_t = LinkedMap<int, int, Ptr>;
+
+ // Special smart pointer for container that can be used as internal NextPtr
+ // for LinkedMap by implementing IsNotNull(), LinkedMap(), and Make().
+ class Ptr : public std::shared_ptr<CustomContainer> {
+ public:
+ [[nodiscard]] auto IsNotNull() const noexcept -> bool {
+ return static_cast<bool>(*this);
+ }
+ [[nodiscard]] auto LinkedMap() const& -> linked_map_t const& {
+ return (*this)->Map();
+ }
+ [[nodiscard]] static auto Make(linked_map_t&& map) -> Ptr {
+ return Ptr{std::make_shared<CustomContainer>(std::move(map))};
+ }
+ };
+
+ explicit CustomContainer(linked_map_t&& map) noexcept
+ : map_{std::move(map)} {}
+ [[nodiscard]] auto Map() & noexcept -> linked_map_t& { return map_; }
+
+ private:
+ linked_map_t map_{};
+};
+
+TEST_CASE("Custom NextPtr", "[linked_map]") {
+ using map_t = LinkedMap<int, int, CustomContainer::Ptr>;
+ constexpr int kCount{100};
+ constexpr int kQ{10};
+
+ auto container = CustomContainer::Ptr::Make(map_t{0, 0});
+ REQUIRE(container);
+ CHECK(container->Map().size() == 1);
+
+ for (int i{1}; i < kCount; ++i) {
+ container = CustomContainer::Ptr::Make(map_t{container, {{i / kQ, i}}});
+ REQUIRE(container);
+ CHECK(container->Map().size() == static_cast<std::size_t>(i / kQ + 1));
+ }
+
+ for (int i{0}; i < kCount / kQ; ++i) {
+ auto key = i;
+ // kQ-many values per key: i -> i*kQ + [0;kQ-1], expect last
+ auto expect = i * kQ + (kQ - 1);
+ CHECK(container->Map().contains(key));
+ CHECK(container->Map().at(key) == expect);
+ }
+}
+
+TEST_CASE("Hash computation", "[linked_map]") {
+ using map_t = LinkedMap<std::string, int>;
+
+ auto map = map_t::MakePtr("foo", 4711); // NOLINT
+ REQUIRE(map);
+ CHECK(not map->empty());
+
+ auto map_hash = std::hash<LinkedMap<std::string, int>>{}(*map);
+ CHECK_FALSE(map_hash == 0);
+
+ auto ptr_hash = std::hash<LinkedMapPtr<std::string, int>>{}(map);
+ CHECK_FALSE(ptr_hash == 0);
+ CHECK(ptr_hash == map_hash);
+
+ map = map_t::MakePtr(map, "foo", 4711); // NOLINT
+ auto dup_hash = std::hash<LinkedMapPtr<std::string, int>>{}(map);
+ CHECK_FALSE(dup_hash == 0);
+ CHECK(dup_hash == map_hash);
+
+ map = map_t::MakePtr(map, "bar", 4712); // NOLINT
+ auto upd_hash = std::hash<LinkedMapPtr<std::string, int>>{}(map);
+ CHECK_FALSE(upd_hash == 0);
+ CHECK_FALSE(upd_hash == map_hash);
+}
diff --git a/test/buildtool/build_engine/target_map/TARGETS b/test/buildtool/build_engine/target_map/TARGETS
new file mode 100644
index 00000000..62cfae4c
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/TARGETS
@@ -0,0 +1,72 @@
+{ "result_map":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["result_map"]
+ , "srcs": ["result_map.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/file_system", "file_system_manager"]
+ , ["src/buildtool/build_engine/target_map", "result_map"]
+ ]
+ , "stage": ["test", "buildtool", "build_engine", "target_map"]
+ }
+, "target_map":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["target_map"]
+ , "srcs": ["target_map.test.cpp"]
+ , "data": ["test_data"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/build_engine/base_maps", "directory_map"]
+ , ["src/buildtool/build_engine/base_maps", "rule_map"]
+ , ["src/buildtool/build_engine/base_maps", "source_map"]
+ , ["src/buildtool/build_engine/base_maps", "targets_file_map"]
+ , ["src/buildtool/build_engine/target_map", "target_map"]
+ ]
+ , "stage": ["test", "buildtool", "build_engine", "target_map"]
+ }
+, "test_data":
+ { "type": ["@", "rules", "data", "staged"]
+ , "srcs":
+ [ "data_src/a/b/targets_here/c/d/foo"
+ , "data_src/file_reference/hello.txt"
+ , "data_src/foo"
+ , "data_src/simple_rules/implicit_script.sh"
+ , "data_src/simple_targets/bar.txt"
+ , "data_src/simple_targets/baz.txt"
+ , "data_src/simple_targets/foo.txt"
+ , "data_src/tree/foo.txt"
+ , "data_src/tree/tree/foo.txt"
+ , "data_src/x/foo"
+ , "data_src/x/x/foo"
+ , "data_src/x/x/x/foo"
+ , "data_src/x/x/x/x/foo"
+ , "data_src/x/x/x/x/x/foo"
+ , "data_targets/TARGETS"
+ , "data_targets/a/b/targets_here/TARGETS"
+ , "data_targets/bad_targets/TARGETS"
+ , "data_targets/config_targets/TARGETS"
+ , "data_targets/file_reference/TARGETS"
+ , "data_targets/result/TARGETS"
+ , "data_targets/simple_rules/TARGETS"
+ , "data_targets/simple_targets/TARGETS"
+ , "data_targets/tree/TARGETS"
+ , "data_targets/x/TARGETS"
+ , "data_targets/x/x/TARGETS"
+ , "data_targets/x/x/x/TARGETS"
+ , "data_targets/x/x/x/x/TARGETS"
+ , "data_targets/x/x/x/x/x/TARGETS"
+ , "data_rules/result/RULES"
+ , "data_rules/rule/RULES"
+ , "data_rules/simple_rules/RULES"
+ , "data_rules/tree/RULES"
+ ]
+ , "stage": ["test", "buildtool", "build_engine", "target_map"]
+ }
+, "TESTS":
+ { "type": "install"
+ , "tainted": ["test"]
+ , "deps": ["result_map", "target_map"]
+ }
+} \ No newline at end of file
diff --git a/test/buildtool/build_engine/target_map/data_rules/result/RULES b/test/buildtool/build_engine/target_map/data_rules/result/RULES
new file mode 100644
index 00000000..6d9affe5
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_rules/result/RULES
@@ -0,0 +1,153 @@
+{ "wrong RESULT":
+ { "string_fields":
+ [ "runfiles"
+ , "runfiles_entry"
+ , "artifacts"
+ , "artifacts_entry"
+ , "provides"
+ ]
+ , "expression":
+ { "type": "let*"
+ , "bindings":
+ [ [ "artifact"
+ , { "type": "BLOB"
+ }
+ ]
+ ]
+ , "body":
+ { "type": "RESULT"
+ , "artifacts":
+ { "type": "if"
+ , "cond":
+ { "type": "FIELD"
+ , "name": "artifacts"
+ }
+ , "then":
+ [ { "type": "join"
+ , "$1":
+ [ "artifacts"
+ , "not"
+ , "a"
+ , "map"
+ ]
+ , "separator": "-"
+ }
+ ]
+ , "else":
+ { "type": "if"
+ , "cond":
+ { "type": "FIELD"
+ , "name": "artifacts_entry"
+ }
+ , "then":
+ { "type": "singleton_map"
+ , "key":
+ { "type": "join"
+ , "$1":
+ [ "bad"
+ , "artifact"
+ , "path"
+ ]
+ , "separator": "-"
+ }
+ , "value":
+ { "type": "join"
+ , "$1":
+ [ "bad"
+ , "artifact"
+ , "entry"
+ ]
+ , "separator": "-"
+ }
+ }
+ , "else":
+ { "type": "singleton_map"
+ , "key": "OK"
+ , "value":
+ { "type": "var"
+ , "name": "artifact"
+ }
+ }
+ }
+ }
+ , "runfiles":
+ { "type": "if"
+ , "cond":
+ { "type": "FIELD"
+ , "name": "runfiles"
+ }
+ , "then":
+ [ { "type": "join"
+ , "$1":
+ [ "runfiles"
+ , "not"
+ , "a"
+ , "map"
+ ]
+ , "separator": "-"
+ }
+ ]
+ , "else":
+ { "type": "if"
+ , "cond":
+ { "type": "FIELD"
+ , "name": "runfiles_entry"
+ }
+ , "then":
+ { "type": "singleton_map"
+ , "key":
+ { "type": "join"
+ , "$1":
+ [ "bad"
+ , "runfiles"
+ , "path"
+ ]
+ , "separator": "-"
+ }
+ , "value":
+ { "type": "join"
+ , "$1":
+ [ "bad"
+ , "runfiles"
+ , "entry"
+ ]
+ , "separator": "-"
+ }
+ }
+ , "else":
+ { "type": "singleton_map"
+ , "key": "OK"
+ , "value":
+ { "type": "var"
+ , "name": "artifact"
+ }
+ }
+ }
+ }
+ , "provides":
+ { "type": "if"
+ , "cond":
+ { "type": "FIELD"
+ , "name": "provides"
+ }
+ , "then":
+ [ { "type": "join"
+ , "$1":
+ [ "provides"
+ , "not"
+ , "a"
+ , "map"
+ ]
+ , "separator": "-"
+ }
+ ]
+ , "else":
+ { "type": "singleton_map"
+ , "key": "OK"
+ , "value": "OK value"
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/test/buildtool/build_engine/target_map/data_rules/rule/RULES b/test/buildtool/build_engine/target_map/data_rules/rule/RULES
new file mode 100644
index 00000000..0967ef42
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_rules/rule/RULES
@@ -0,0 +1 @@
+{}
diff --git a/test/buildtool/build_engine/target_map/data_rules/simple_rules/RULES b/test/buildtool/build_engine/target_map/data_rules/simple_rules/RULES
new file mode 100644
index 00000000..749010ed
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_rules/simple_rules/RULES
@@ -0,0 +1,293 @@
+{ "just provide":
+ { "expression":
+ { "type": "RESULT"
+ , "provides":
+ { "type": "singleton_map"
+ , "key": "foo"
+ , "value": "bar"
+ }
+ }
+ }
+, "provide variable FOO":
+ { "config_vars":
+ [ "FOO"
+ ]
+ , "expression":
+ { "type": "RESULT"
+ , "provides":
+ { "type": "singleton_map"
+ , "key": "foo"
+ , "value":
+ { "type": "var"
+ , "name": "FOO"
+ }
+ }
+ }
+ }
+, "transition FOO":
+ { "config_fields":
+ [ "value"
+ ]
+ , "target_fields":
+ [ "deps"
+ ]
+ , "config_transitions":
+ { "deps":
+ [ { "type": "singleton_map"
+ , "key": "FOO"
+ , "value":
+ { "type": "join"
+ , "$1":
+ { "type": "FIELD"
+ , "name": "value"
+ }
+ }
+ }
+ ]
+ }
+ , "expression":
+ { "type": "RESULT"
+ , "provides":
+ { "type": "singleton_map"
+ , "key": "transitioned deps"
+ , "value":
+ { "type": "foreach"
+ , "var": "x"
+ , "range":
+ { "type": "FIELD"
+ , "name": "deps"
+ }
+ , "body":
+ { "type": "DEP_PROVIDES"
+ , "dep":
+ { "type": "var"
+ , "name": "x"
+ }
+ , "transition":
+ { "type": "singleton_map"
+ , "key": "FOO"
+ , "value":
+ { "type": "join"
+ , "$1":
+ { "type": "FIELD"
+ , "name": "value"
+ }
+ }
+ }
+ , "provider": "foo"
+ }
+ }
+ }
+ }
+ }
+, "collect deps":
+ { "target_fields":
+ [ "deps"
+ ]
+ , "expression":
+ { "type": "RESULT"
+ , "artifacts":
+ { "type": "disjoint_map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "x"
+ , "range":
+ { "type": "FIELD"
+ , "name": "deps"
+ }
+ , "body":
+ { "type": "DEP_ARTIFACTS"
+ , "dep":
+ { "type": "var"
+ , "name": "x"
+ }
+ }
+ }
+ }
+ }
+ }
+, "collect deps as runfiles":
+ { "target_fields":
+ [ "deps"
+ ]
+ , "expression":
+ { "type": "RESULT"
+ , "runfiles":
+ { "type": "disjoint_map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "x"
+ , "range":
+ { "type": "FIELD"
+ , "name": "deps"
+ }
+ , "body":
+ { "type": "DEP_ARTIFACTS"
+ , "dep":
+ { "type": "var"
+ , "name": "x"
+ }
+ }
+ }
+ }
+ }
+ }
+, "text file":
+ { "string_fields":
+ [ "name"
+ , "content"
+ ]
+ , "target_fields":
+ [ "analyze"
+ ]
+ , "expression":
+ { "type": "RESULT"
+ , "artifacts":
+ { "type": "singleton_map"
+ , "key":
+ { "type": "join"
+ , "$1":
+ { "type": "FIELD"
+ , "name": "name"
+ }
+ }
+ , "value":
+ { "type": "BLOB"
+ , "data":
+ { "type": "join"
+ , "$1":
+ { "type": "FIELD"
+ , "name": "content"
+ }
+ }
+ }
+ }
+ }
+ }
+, "implicit file":
+ { "implicit":
+ { "script":
+ [ "implicit_script.sh"
+ ]
+ }
+ , "expression":
+ { "type": "RESULT"
+ , "artifacts":
+ { "type": "disjoint_map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "x"
+ , "range":
+ { "type": "FIELD"
+ , "name": "script"
+ }
+ , "body":
+ { "type": "DEP_ARTIFACTS"
+ , "dep":
+ { "type": "var"
+ , "name": "x"
+ }
+ }
+ }
+ }
+ }
+ }
+, "upper case":
+ { "target_fields":
+ [ "srcs"
+ ]
+ , "expression":
+ { "type": "RESULT"
+ , "artifacts":
+ { "type": "disjoint_map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "input"
+ , "range":
+ { "type": "FIELD"
+ , "name": "srcs"
+ }
+ , "body":
+ { "type": "disjoint_map_union"
+ , "$1":
+ { "type": "let*"
+ , "bindings":
+ [ [ "input_artifacts"
+ , { "type": "DEP_ARTIFACTS"
+ , "dep":
+ { "type": "var"
+ , "name": "input"
+ }
+ }
+ ]
+ , [ "names"
+ , { "type": "keys"
+ , "$1":
+ { "type": "var"
+ , "name": "input_artifacts"
+ }
+ }
+ ]
+ ]
+ , "body":
+ { "type": "foreach"
+ , "var": "x"
+ , "range":
+ { "type": "var"
+ , "name": "names"
+ }
+ , "body":
+ { "type": "let*"
+ , "bindings":
+ [ [ "upper"
+ , { "type": "ACTION"
+ , "inputs":
+ { "type": "singleton_map"
+ , "key": "in"
+ , "value":
+ { "type": "lookup"
+ , "map":
+ { "type": "var"
+ , "name": "input_artifacts"
+ }
+ , "key":
+ { "type": "var"
+ , "name": "x"
+ }
+ }
+ }
+ , "outs":
+ [ "out"
+ ]
+ , "cmd":
+ [ "/bin/sh"
+ , "-c"
+ , "tr 'a-z' 'A-Z' < in > out"
+ ]
+ }
+ ]
+ ]
+ , "body":
+ { "type": "singleton_map"
+ , "key":
+ { "type": "var"
+ , "name": "x"
+ }
+ , "value":
+ { "type": "lookup"
+ , "map":
+ { "type": "var"
+ , "name": "upper"
+ }
+ , "key": "out"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/test/buildtool/build_engine/target_map/data_rules/tree/RULES b/test/buildtool/build_engine/target_map/data_rules/tree/RULES
new file mode 100644
index 00000000..66fedde9
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_rules/tree/RULES
@@ -0,0 +1,87 @@
+{ "ls -R":
+ { "target_fields":
+ [ "tree"
+ , "direct"
+ ]
+ , "expression":
+ { "type": "let*"
+ , "bindings":
+ [ [ "tree"
+ , { "type": "TREE"
+ , "$1":
+ { "type": "map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "dep"
+ , "range":
+ { "type": "FIELD"
+ , "name": "tree"
+ }
+ , "body":
+ { "type": "DEP_RUNFILES"
+ , "dep":
+ { "type": "var"
+ , "name": "dep"
+ }
+ }
+ }
+ }
+ }
+ ]
+ , [ "direct"
+ , { "type": "map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "dep"
+ , "range":
+ { "type": "FIELD"
+ , "name": "direct"
+ }
+ , "body":
+ { "type": "DEP_RUNFILES"
+ , "dep":
+ { "type": "var"
+ , "name": "dep"
+ }
+ }
+ }
+ }
+ ]
+ , [ "inputs"
+ , { "type": "map_union"
+ , "$1":
+ [ { "type": "singleton_map"
+ , "key": "tree"
+ , "value":
+ { "type": "var"
+ , "name": "tree"
+ }
+ }
+ , { "type": "var"
+ , "name": "direct"
+ }
+ ]
+ }
+ ]
+ ]
+ , "body":
+ { "type": "RESULT"
+ , "artifacts":
+ { "type": "ACTION"
+ , "outs":
+ [ "_out"
+ ]
+ , "inputs":
+ { "type": "var"
+ , "name": "inputs"
+ }
+ , "cmd":
+ [ "sh"
+ , "-c"
+ , "find . -name '*.txt' > _out"
+ ]
+ }
+ }
+ }
+ }
+}
diff --git a/test/buildtool/build_engine/target_map/data_src/a/b/targets_here/c/d/foo b/test/buildtool/build_engine/target_map/data_src/a/b/targets_here/c/d/foo
new file mode 100644
index 00000000..257cc564
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_src/a/b/targets_here/c/d/foo
@@ -0,0 +1 @@
+foo
diff --git a/test/buildtool/build_engine/target_map/data_src/file_reference/hello.txt b/test/buildtool/build_engine/target_map/data_src/file_reference/hello.txt
new file mode 100644
index 00000000..3b18e512
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_src/file_reference/hello.txt
@@ -0,0 +1 @@
+hello world
diff --git a/test/buildtool/build_engine/target_map/data_src/foo b/test/buildtool/build_engine/target_map/data_src/foo
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_src/foo
diff --git a/test/buildtool/build_engine/target_map/data_src/simple_rules/implicit_script.sh b/test/buildtool/build_engine/target_map/data_src/simple_rules/implicit_script.sh
new file mode 100644
index 00000000..26b677ee
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_src/simple_rules/implicit_script.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+echo Hello World
diff --git a/test/buildtool/build_engine/target_map/data_src/simple_targets/bar.txt b/test/buildtool/build_engine/target_map/data_src/simple_targets/bar.txt
new file mode 100644
index 00000000..5716ca59
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_src/simple_targets/bar.txt
@@ -0,0 +1 @@
+bar
diff --git a/test/buildtool/build_engine/target_map/data_src/simple_targets/baz.txt b/test/buildtool/build_engine/target_map/data_src/simple_targets/baz.txt
new file mode 100644
index 00000000..76018072
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_src/simple_targets/baz.txt
@@ -0,0 +1 @@
+baz
diff --git a/test/buildtool/build_engine/target_map/data_src/simple_targets/foo.txt b/test/buildtool/build_engine/target_map/data_src/simple_targets/foo.txt
new file mode 100644
index 00000000..257cc564
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_src/simple_targets/foo.txt
@@ -0,0 +1 @@
+foo
diff --git a/test/buildtool/build_engine/target_map/data_src/tree/foo.txt b/test/buildtool/build_engine/target_map/data_src/tree/foo.txt
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_src/tree/foo.txt
diff --git a/test/buildtool/build_engine/target_map/data_src/tree/tree/foo.txt b/test/buildtool/build_engine/target_map/data_src/tree/tree/foo.txt
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_src/tree/tree/foo.txt
diff --git a/test/buildtool/build_engine/target_map/data_src/x/foo b/test/buildtool/build_engine/target_map/data_src/x/foo
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_src/x/foo
diff --git a/test/buildtool/build_engine/target_map/data_src/x/x/foo b/test/buildtool/build_engine/target_map/data_src/x/x/foo
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_src/x/x/foo
diff --git a/test/buildtool/build_engine/target_map/data_src/x/x/x/foo b/test/buildtool/build_engine/target_map/data_src/x/x/x/foo
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_src/x/x/x/foo
diff --git a/test/buildtool/build_engine/target_map/data_src/x/x/x/x/foo b/test/buildtool/build_engine/target_map/data_src/x/x/x/x/foo
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_src/x/x/x/x/foo
diff --git a/test/buildtool/build_engine/target_map/data_src/x/x/x/x/x/foo b/test/buildtool/build_engine/target_map/data_src/x/x/x/x/x/foo
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_src/x/x/x/x/x/foo
diff --git a/test/buildtool/build_engine/target_map/data_targets/TARGETS b/test/buildtool/build_engine/target_map/data_targets/TARGETS
new file mode 100644
index 00000000..0967ef42
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_targets/TARGETS
@@ -0,0 +1 @@
+{}
diff --git a/test/buildtool/build_engine/target_map/data_targets/a/b/targets_here/TARGETS b/test/buildtool/build_engine/target_map/data_targets/a/b/targets_here/TARGETS
new file mode 100644
index 00000000..0967ef42
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_targets/a/b/targets_here/TARGETS
@@ -0,0 +1 @@
+{}
diff --git a/test/buildtool/build_engine/target_map/data_targets/bad_targets/TARGETS b/test/buildtool/build_engine/target_map/data_targets/bad_targets/TARGETS
new file mode 100644
index 00000000..3e2a295a
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_targets/bad_targets/TARGETS
@@ -0,0 +1,16 @@
+{ "string field":
+ { "type": ["simple_rules", "text file"]
+ , "name": "PlAiN sTrInG"
+ , "content": ["This is FOO!"]
+ }
+, "string field 2":
+ { "type": ["simple_rules", "text file"]
+ , "name": ["OK", 4711, "OK"]
+ , "content": ["This is FOO!"]
+ }
+, "config field":
+ { "type": ["simple_rules", "transition FOO"]
+ , "value": [{"type": "singleton_map", "key": "FooKey", "value": "BarValue"}]
+ , "deps": [["siple_targets", "rule provides FOO"]]
+ }
+}
diff --git a/test/buildtool/build_engine/target_map/data_targets/config_targets/TARGETS b/test/buildtool/build_engine/target_map/data_targets/config_targets/TARGETS
new file mode 100644
index 00000000..4068efdf
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_targets/config_targets/TARGETS
@@ -0,0 +1,27 @@
+{ "indirect dependency":
+ { "type":
+ [ "simple_rules"
+ , "upper case"
+ ]
+ , "srcs":
+ [ "use foo"
+ ]
+ }
+, "use foo":
+ { "type":
+ [ "simple_rules"
+ , "text file"
+ ]
+ , "arguments_config":
+ [ "foo"
+ ]
+ , "name":
+ [ "foo.txt."
+ ]
+ , "content":
+ [ { "type": "var"
+ , "name": "foo"
+ }
+ ]
+ }
+}
diff --git a/test/buildtool/build_engine/target_map/data_targets/file_reference/TARGETS b/test/buildtool/build_engine/target_map/data_targets/file_reference/TARGETS
new file mode 100644
index 00000000..5e62ec6b
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_targets/file_reference/TARGETS
@@ -0,0 +1,9 @@
+{ "_hello.txt" : { "type" : "install"
+ , "files" : { "raw_data/hello.txt" : ["FILE", null, "hello.txt"] }
+ }
+, "hello.txt" : { "type" : "generic"
+ , "deps" : ["_hello.txt"]
+ , "outs" : ["hello.txt"]
+ , "cmds" : ["cat raw_data/hello.txt | tr 'a-z' 'A-Z' > hello.txt"]
+ }
+}
diff --git a/test/buildtool/build_engine/target_map/data_targets/result/TARGETS b/test/buildtool/build_engine/target_map/data_targets/result/TARGETS
new file mode 100644
index 00000000..e58b1ed5
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_targets/result/TARGETS
@@ -0,0 +1,46 @@
+{ "artifacts":
+ { "type":
+ [ "result"
+ , "wrong RESULT"
+ ]
+ , "artifacts":
+ [ "YES"
+ ]
+ }
+, "artifacts entry":
+ { "type":
+ [ "result"
+ , "wrong RESULT"
+ ]
+ , "artifacts_entry":
+ [ "YES"
+ ]
+ }
+, "runfiles":
+ { "type":
+ [ "result"
+ , "wrong RESULT"
+ ]
+ , "runfiles":
+ [ "YES"
+ ]
+ }
+, "runfiles entry":
+ { "type":
+ [ "result"
+ , "wrong RESULT"
+ ]
+ , "runfiles_entry":
+ [ "YES"
+ ]
+ }
+, "provides":
+ { "type":
+ [ "result"
+ , "wrong RESULT"
+ ]
+ , "provides":
+ [ "YES"
+ ]
+ }
+}
diff --git a/test/buildtool/build_engine/target_map/data_targets/simple_rules/TARGETS b/test/buildtool/build_engine/target_map/data_targets/simple_rules/TARGETS
new file mode 100644
index 00000000..0967ef42
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_targets/simple_rules/TARGETS
@@ -0,0 +1 @@
+{}
diff --git a/test/buildtool/build_engine/target_map/data_targets/simple_targets/TARGETS b/test/buildtool/build_engine/target_map/data_targets/simple_targets/TARGETS
new file mode 100644
index 00000000..1697629b
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_targets/simple_targets/TARGETS
@@ -0,0 +1,156 @@
+{ "rule just provides":
+ { "type":
+ [ "simple_rules"
+ , "just provide"
+ ]
+ }
+, "rule provides FOO":
+ { "type":
+ [ "simple_rules"
+ , "provide variable FOO"
+ ]
+ }
+, "config transition for FOO":
+ { "type":
+ [ "simple_rules"
+ , "transition FOO"
+ ]
+ , "value":
+ [ "bar"
+ , "baz"
+ ]
+ , "deps":
+ [ "rule provides FOO"
+ ]
+ }
+, "collect dep artifacts":
+ { "type":
+ [ "simple_rules"
+ , "collect deps"
+ ]
+ , "deps":
+ [ "foo.txt"
+ , "bar.txt"
+ , "baz.txt"
+ ]
+ }
+, "collect as runfiles":
+ { "type":
+ [ "simple_rules"
+ , "collect deps as runfiles"
+ ]
+ , "deps":
+ [ "foo.txt"
+ , "bar.txt"
+ , "baz.txt"
+ ]
+ }
+, "stage blob":
+ { "type":
+ [ "simple_rules"
+ , "text file"
+ ]
+ , "name":
+ [ "foo.txt"
+ ]
+ , "content":
+ [ "This is FOO!"
+ ]
+ }
+, "use implicit":
+ { "type":
+ [ "simple_rules"
+ , "implicit file"
+ ]
+ }
+, "actions":
+ { "type":
+ [ "simple_rules"
+ , "upper case"
+ ]
+ , "srcs":
+ [ "foo.txt"
+ , "bar.txt"
+ ]
+ }
+, "artifact names":
+ { "type":
+ [ "simple_rules"
+ , "text file"
+ ]
+ , "name":
+ [ "index.txt"
+ ]
+ , "content":
+ [ { "type": "join"
+ , "separator": ";"
+ , "$1":
+ { "type": "outs"
+ , "dep": "collect dep artifacts"
+ }
+ }
+ ]
+ , "analyze":
+ [ "collect dep artifacts"
+ ]
+ }
+, "runfile names":
+ { "type":
+ [ "simple_rules"
+ , "text file"
+ ]
+ , "name":
+ [ "index.txt"
+ ]
+ , "content":
+ [ { "type": "join"
+ , "separator": ";"
+ , "$1":
+ { "type": "runfiles"
+ , "dep": "collect as runfiles"
+ }
+ }
+ ]
+ , "analyze":
+ [ "collect as runfiles"
+ ]
+ }
+, "use generic":
+ { "type": "generic"
+ , "deps":
+ [ "foo.txt"
+ , "bar.txt"
+ ]
+ , "cmds":
+ [ "cat foo.txt bar.txt > out"
+ , "echo 'DONE' >> out"
+ ]
+ , "outs":
+ [ "out"
+ ]
+ }
+, "install":
+ { "type": "install"
+ , "deps":
+ [ "foo.txt"
+ , "bar.txt"
+ ]
+ , "files":
+ { "combined.txt": "use generic"
+ , "subdir/restaged.txt": "bar.txt"
+ }
+ , "dirs":
+ [ [ "collect as runfiles"
+ , "mix/in/this/subdir"
+ ]
+ , [ "runfile names"
+ , "mix/in/this/subdir"
+ ]
+ ]
+ }
+, "generate file":
+ { "type": "file_gen"
+ , "name": "generated.txt"
+ , "data": "Hello World!"
+ }
+}
diff --git a/test/buildtool/build_engine/target_map/data_targets/tree/TARGETS b/test/buildtool/build_engine/target_map/data_targets/tree/TARGETS
new file mode 100644
index 00000000..d08ebb47
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_targets/tree/TARGETS
@@ -0,0 +1,23 @@
+{ "no conflict":
+ { "type":
+ [ "tree"
+ , "ls -R"
+ ]
+ , "tree":
+ [ [ "simple_targets"
+ , "collect as runfiles"
+ ]
+ ]
+ , "direct":
+ [ "foo.txt"
+ ]
+ }
+, "range conflict":
+ { "type":
+ [ "tree"
+ , "ls -R"
+ ]
+ , "tree": []
+ , "direct": ["tree/foo.txt"]
+ }
+}
diff --git a/test/buildtool/build_engine/target_map/data_targets/x/TARGETS b/test/buildtool/build_engine/target_map/data_targets/x/TARGETS
new file mode 100644
index 00000000..0967ef42
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_targets/x/TARGETS
@@ -0,0 +1 @@
+{}
diff --git a/test/buildtool/build_engine/target_map/data_targets/x/x/TARGETS b/test/buildtool/build_engine/target_map/data_targets/x/x/TARGETS
new file mode 100644
index 00000000..0967ef42
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_targets/x/x/TARGETS
@@ -0,0 +1 @@
+{}
diff --git a/test/buildtool/build_engine/target_map/data_targets/x/x/x/TARGETS b/test/buildtool/build_engine/target_map/data_targets/x/x/x/TARGETS
new file mode 100644
index 00000000..115eef20
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_targets/x/x/x/TARGETS
@@ -0,0 +1,9 @@
+{ "addressing":
+ { "type": "install"
+ , "files":
+ { "absolute": [ "x/x" , "foo" ]
+ , "relative": [ "./" , "x/x" , "foo" ]
+ , "upwards": [ "./" , "../.." , "foo" ]
+ }
+ }
+}
diff --git a/test/buildtool/build_engine/target_map/data_targets/x/x/x/x/TARGETS b/test/buildtool/build_engine/target_map/data_targets/x/x/x/x/TARGETS
new file mode 100644
index 00000000..0967ef42
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_targets/x/x/x/x/TARGETS
@@ -0,0 +1 @@
+{}
diff --git a/test/buildtool/build_engine/target_map/data_targets/x/x/x/x/x/TARGETS b/test/buildtool/build_engine/target_map/data_targets/x/x/x/x/x/TARGETS
new file mode 100644
index 00000000..0967ef42
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/data_targets/x/x/x/x/x/TARGETS
@@ -0,0 +1 @@
+{}
diff --git a/test/buildtool/build_engine/target_map/result_map.test.cpp b/test/buildtool/build_engine/target_map/result_map.test.cpp
new file mode 100644
index 00000000..14bf0638
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/result_map.test.cpp
@@ -0,0 +1,139 @@
+#include <cstdlib>
+#include <filesystem>
+#include <fstream>
+#include <string>
+#include <vector>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/build_engine/target_map/result_map.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+
+namespace {
+
+[[nodiscard]] auto GetTestDir() -> std::filesystem::path {
+ auto* tmp_dir = std::getenv("TEST_TMPDIR");
+ if (tmp_dir != nullptr) {
+ return tmp_dir;
+ }
+ return FileSystemManager::GetCurrentDirectory() /
+ "test/buildtool/build_engine/target_map";
+}
+
+[[nodiscard]] auto CreateAnalysedTarget(
+ TargetResult const& result,
+ std::vector<ActionDescription> const& descs,
+ std::vector<std::string> const& blobs) -> AnalysedTargetPtr {
+ return std::make_shared<AnalysedTarget>(result,
+ descs,
+ blobs,
+ std::vector<Tree>(),
+ std::unordered_set<std::string>{},
+ std::set<std::string>{});
+}
+
+} // namespace
+
+TEST_CASE("empty map", "[result_map]") {
+ using BuildMaps::Target::ResultTargetMap;
+ ResultTargetMap map{0};
+
+ CHECK(map.ToResult().actions.empty());
+ CHECK(map.ToResult().blobs.empty());
+
+ CHECK(map.ToJson() == R"({"actions": {}, "blobs": [], "trees": {}})"_json);
+
+ auto filename = (GetTestDir() / "test_empty.graph").string();
+ map.ToFile(filename);
+ std::ifstream file(filename);
+ nlohmann::json from_file{};
+ file >> from_file;
+ CHECK(from_file == R"({"actions": {}, "blobs": [], "trees": {}})"_json);
+}
+
+TEST_CASE("origins creation", "[result_map]") {
+ using BuildMaps::Base::EntityName;
+ using BuildMaps::Target::ResultTargetMap;
+
+ auto foo =
+ ActionDescription{{}, {}, Action{"run_foo", {"touch", "foo"}, {}}, {}};
+ auto bar =
+ ActionDescription{{}, {}, Action{"run_bar", {"touch", "bar"}, {}}, {}};
+ auto baz =
+ ActionDescription{{}, {}, Action{"run_baz", {"touch", "baz"}, {}}, {}};
+
+ ResultTargetMap map{0};
+ CHECK(map.Add(EntityName{"", ".", "foobar"},
+ {},
+ CreateAnalysedTarget(
+ {}, std::vector<ActionDescription>{foo, bar}, {})));
+ CHECK(map.Add(
+ EntityName{"", ".", "baz"},
+ {},
+ CreateAnalysedTarget({}, std::vector<ActionDescription>{baz}, {})));
+
+ auto result = map.ToResult();
+ REQUIRE(result.actions.size() == 3);
+ CHECK(result.blobs.empty());
+
+ auto expect_foo = foo.ToJson();
+ auto expect_bar = bar.ToJson();
+ auto expect_baz = baz.ToJson();
+ CHECK(map.ToJson() == nlohmann::json{{"actions",
+ {{foo.Id(), expect_foo},
+ {bar.Id(), expect_bar},
+ {baz.Id(), expect_baz}}},
+ {"blobs", nlohmann::json::array()},
+ {"trees", nlohmann::json::object()}});
+
+ expect_foo["origins"] =
+ R"([{"target": ["@", "", "", "foobar"], "config": {}, "subtask":
+ 0}])"_json;
+ expect_bar["origins"] =
+ R"([{"target": ["@", "", "", "foobar"], "config": {}, "subtask":
+ 1}])"_json;
+ expect_baz["origins"] =
+ R"([{"target": ["@", "", "", "baz"], "config": {}, "subtask":
+ 0}])"_json;
+
+ auto filename = (GetTestDir() / "test_with_origins.graph").string();
+ map.ToFile(filename);
+ std::ifstream file(filename);
+ nlohmann::json from_file{};
+ file >> from_file;
+ CHECK(from_file == nlohmann::json{{"actions",
+ {{foo.Id(), expect_foo},
+ {bar.Id(), expect_bar},
+ {baz.Id(), expect_baz}}},
+ {"blobs", nlohmann::json::array()},
+ {"trees", nlohmann::json::object()}});
+}
+
+TEST_CASE("blobs uniqueness", "[result_map]") {
+ using BuildMaps::Base::EntityName;
+ using BuildMaps::Target::ResultTargetMap;
+
+ ResultTargetMap map{0};
+ CHECK(map.Add(EntityName{"", ".", "foobar"},
+ {},
+ CreateAnalysedTarget({}, {}, {"foo", "bar"})));
+ CHECK(map.Add(EntityName{"", ".", "barbaz"},
+ {},
+ CreateAnalysedTarget({}, {}, {"bar", "baz"})));
+
+ auto result = map.ToResult();
+ CHECK(result.actions.empty());
+ CHECK(result.blobs.size() == 3);
+
+ CHECK(map.ToJson() == nlohmann::json{{"actions", nlohmann::json::object()},
+ {"blobs", {"bar", "baz", "foo"}},
+ {"trees", nlohmann::json::object()}});
+
+ auto filename = (GetTestDir() / "test_unique_blobs.graph").string();
+ map.ToFile</*kIncludeOrigins=*/false>(filename);
+ std::ifstream file(filename);
+ nlohmann::json from_file{};
+ file >> from_file;
+ CHECK(from_file == nlohmann::json{{"actions", nlohmann::json::object()},
+ {"blobs", {"bar", "baz", "foo"}},
+ {"trees", nlohmann::json::object()}});
+}
diff --git a/test/buildtool/build_engine/target_map/target_map.test.cpp b/test/buildtool/build_engine/target_map/target_map.test.cpp
new file mode 100644
index 00000000..a711eef3
--- /dev/null
+++ b/test/buildtool/build_engine/target_map/target_map.test.cpp
@@ -0,0 +1,914 @@
+#include "catch2/catch.hpp"
+#include "src/buildtool/build_engine/base_maps/directory_map.hpp"
+#include "src/buildtool/build_engine/base_maps/entity_name.hpp"
+#include "src/buildtool/build_engine/base_maps/expression_map.hpp"
+#include "src/buildtool/build_engine/base_maps/rule_map.hpp"
+#include "src/buildtool/build_engine/base_maps/source_map.hpp"
+#include "src/buildtool/build_engine/base_maps/targets_file_map.hpp"
+#include "src/buildtool/build_engine/expression/expression.hpp"
+#include "src/buildtool/build_engine/target_map/target_map.hpp"
+#include "src/buildtool/multithreading/async_map_consumer.hpp"
+#include "src/buildtool/multithreading/task_system.hpp"
+
+namespace {
+
+using none_t = Expression::none_t;
+
+void SetupConfig() {
+ auto info = RepositoryConfig::RepositoryInfo{
+ FileRoot{"test/buildtool/build_engine/target_map/data_src"},
+ FileRoot{"test/buildtool/build_engine/target_map/data_targets"},
+ FileRoot{"test/buildtool/build_engine/target_map/data_rules"},
+ FileRoot{"test/buildtool/build_engine/target_map/data_expr"}};
+ RepositoryConfig::Instance().Reset();
+ RepositoryConfig::Instance().SetInfo("", std::move(info));
+}
+
+} // namespace
+
+TEST_CASE("simple targets") {
+ SetupConfig();
+ auto directory_entries = BuildMaps::Base::CreateDirectoryEntriesMap();
+ auto source = BuildMaps::Base::CreateSourceTargetMap(&directory_entries);
+ auto targets_file_map = BuildMaps::Base::CreateTargetsFileMap(0);
+ auto rule_file_map = BuildMaps::Base::CreateRuleFileMap(0);
+ static auto expressions_file_map =
+ BuildMaps::Base::CreateExpressionFileMap(0);
+ auto expr_map = BuildMaps::Base::CreateExpressionMap(&expressions_file_map);
+ auto rule_map = BuildMaps::Base::CreateRuleMap(&rule_file_map, &expr_map);
+ BuildMaps::Target::ResultTargetMap result_map{0};
+ auto target_map = BuildMaps::Target::CreateTargetMap(
+ &source, &targets_file_map, &rule_map, &result_map);
+
+ AnalysedTargetPtr result;
+ bool error{false};
+ std::string error_msg;
+ auto empty_config = Configuration{Expression::FromJson(R"({})"_json)};
+
+ SECTION("Actual source file") {
+ {
+ error_msg = "NONE";
+ error = false;
+
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "a/b/targets_here", "c/d/foo"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(!error);
+ CHECK(error_msg == "NONE");
+ auto artifacts = result->Artifacts();
+ ExpressionPtr artifact = artifacts->Get("c/d/foo", none_t{});
+ CHECK(artifact->IsArtifact());
+ }
+
+ SECTION("No targets file here") {
+ {
+ error_msg = "NONE";
+ error = false;
+
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "a/b/targets_here/c", "d/foo"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(error);
+ CHECK(error_msg != "NONE");
+ }
+
+ SECTION("Rule just provides") {
+ {
+ error_msg = "NONE";
+ error = false;
+ result = nullptr;
+
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "simple_targets", "rule just provides"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(not error);
+ CHECK(error_msg == "NONE");
+ CHECK(result->Provides() ==
+ Expression::FromJson(R"({"foo": "bar"})"_json));
+ }
+
+ SECTION("Rule provides variable, but unset") {
+ {
+ error_msg = "NONE";
+ error = false;
+ result = nullptr;
+
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "simple_targets", "rule provides FOO"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(not error);
+ CHECK(error_msg == "NONE");
+ CHECK(result->Provides() ==
+ Expression::FromJson(R"({"foo": null})"_json));
+ }
+
+ SECTION("Rule provides variable, set in config") {
+ {
+ error_msg = "NONE";
+ error = false;
+ result = nullptr;
+ auto config = Configuration{
+ Expression::FromJson(R"({"FOO": "foobar"})"_json)};
+
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "simple_targets", "rule provides FOO"},
+ config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(not error);
+ CHECK(error_msg == "NONE");
+ CHECK(result->Provides() ==
+ Expression::FromJson(R"({"foo": "foobar"})"_json));
+ }
+
+ SECTION("Rule provides variable, set via config transition") {
+ {
+ error_msg = "NONE";
+ error = false;
+ result = nullptr;
+ auto config = Configuration{
+ Expression::FromJson(R"({"FOO": "foobar"})"_json)};
+
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "simple_targets", "config transition for FOO"},
+ config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(not error);
+ CHECK(error_msg == "NONE");
+ CHECK(
+ result->Provides() ==
+ Expression::FromJson(R"({"transitioned deps": ["barbaz"]})"_json));
+ }
+
+ SECTION("Rule collects dependency artifacts") {
+ {
+ error_msg = "NONE";
+ error = false;
+ result = nullptr;
+
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "simple_targets", "collect dep artifacts"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(not error);
+ CHECK(error_msg == "NONE");
+ // Look into the internals of the artifacts by using the json
+ // representation
+ auto artifacts_desc = result->Artifacts()->ToJson();
+ CHECK(artifacts_desc["foo.txt"]["data"]["path"] ==
+ "simple_targets/foo.txt");
+ CHECK(artifacts_desc["bar.txt"]["data"]["path"] ==
+ "simple_targets/bar.txt");
+ CHECK(artifacts_desc["baz.txt"]["data"]["path"] ==
+ "simple_targets/baz.txt");
+ }
+
+ SECTION("Rule stages blob") {
+ {
+ error_msg = "NONE";
+ error = false;
+ result = nullptr;
+
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "simple_targets", "stage blob"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(not error);
+ CHECK(error_msg == "NONE");
+ auto blobs = result->Blobs();
+ CHECK(blobs.size() == 1);
+ CHECK(blobs[0] == "This is FOO!");
+ auto artifacts_desc = result->Artifacts()->ToJson();
+ CHECK(artifacts_desc["foo.txt"]["type"] == "KNOWN");
+ }
+
+ SECTION("Stage implicit target") {
+ {
+ error_msg = "NONE";
+ error = false;
+ result = nullptr;
+
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "simple_targets", "use implicit"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(not error);
+ CHECK(error_msg == "NONE");
+ // Look into the internals of the artifacts by using the json
+ // representation
+ auto artifacts_desc = result->Artifacts()->ToJson();
+ CHECK(artifacts_desc["implicit_script.sh"]["data"]["path"] ==
+ "simple_rules/implicit_script.sh");
+ }
+
+ SECTION("simple actions") {
+ {
+ error_msg = "NONE";
+ error = false;
+ result = nullptr;
+
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "simple_targets", "actions"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(not error);
+ CHECK(error_msg == "NONE");
+ // Look into the internals of the artifacts by using the json
+ // representation
+ auto artifacts_desc = result->Artifacts()->ToJson();
+ CHECK(artifacts_desc["foo.txt"]["type"] == "ACTION");
+ CHECK(artifacts_desc["bar.txt"]["type"] == "ACTION");
+ // We have a deterministic evaluation order, so the order of the actions
+ // in the vector is guaranteed. The test rule generates the action by
+ // iterating over the "srcs" field, so we get the actions in the order
+ // of that field, not in alphabetical order.
+ CHECK(result->Actions()[0].ToJson()["input"]["in"]["data"]["path"] ==
+ "simple_targets/foo.txt");
+ CHECK(result->Actions()[1].ToJson()["input"]["in"]["data"]["path"] ==
+ "simple_targets/bar.txt");
+ }
+}
+
+TEST_CASE("configuration deduplication") {
+ SetupConfig();
+ auto directory_entries = BuildMaps::Base::CreateDirectoryEntriesMap();
+ auto source = BuildMaps::Base::CreateSourceTargetMap(&directory_entries);
+ auto targets_file_map = BuildMaps::Base::CreateTargetsFileMap(0);
+ auto rule_file_map = BuildMaps::Base::CreateRuleFileMap(0);
+ static auto expressions_file_map =
+ BuildMaps::Base::CreateExpressionFileMap(0);
+ auto expr_map = BuildMaps::Base::CreateExpressionMap(&expressions_file_map);
+ auto rule_map = BuildMaps::Base::CreateRuleMap(&rule_file_map, &expr_map);
+ BuildMaps::Target::ResultTargetMap result_map{0};
+ auto target_map = BuildMaps::Target::CreateTargetMap(
+ &source, &targets_file_map, &rule_map, &result_map);
+
+ std::vector<AnalysedTargetPtr> result;
+ bool error{false};
+ std::string error_msg = "NONE";
+ auto config = Configuration{Expression::FromJson(
+ R"({"foo" : "bar", "irrelevant": "ignore me"})"_json)};
+ auto alternative_config = Configuration{Expression::FromJson(
+ R"({"foo" : "bar", "irrelevant": "other value"})"_json)};
+ auto different_config =
+ Configuration{Expression::FromJson(R"({"foo" : "baz"})"_json)};
+
+ auto indirect_target = BuildMaps::Base::EntityName{
+ "", "config_targets", "indirect dependency"};
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{indirect_target, config},
+ BuildMaps::Target::ConfiguredTarget{indirect_target,
+ alternative_config},
+ BuildMaps::Target::ConfiguredTarget{indirect_target,
+ different_config}},
+ [&result](auto values) {
+ std::transform(values.begin(),
+ values.end(),
+ std::back_inserter(result),
+ [](auto* target) { return *target; });
+ },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(not error);
+ CHECK(error_msg == "NONE");
+ CHECK(result[0]->Artifacts() == result[1]->Artifacts());
+ CHECK(result[0]->Artifacts() != result[2]->Artifacts());
+ auto analysis_result = result_map.ToResult();
+ CHECK(analysis_result.actions.size() == 2);
+}
+
+TEST_CASE("generator functions in string arguments") {
+ SetupConfig();
+ auto directory_entries = BuildMaps::Base::CreateDirectoryEntriesMap();
+ auto source = BuildMaps::Base::CreateSourceTargetMap(&directory_entries);
+ auto targets_file_map = BuildMaps::Base::CreateTargetsFileMap(0);
+ auto rule_file_map = BuildMaps::Base::CreateRuleFileMap(0);
+ static auto expressions_file_map =
+ BuildMaps::Base::CreateExpressionFileMap(0);
+ auto expr_map = BuildMaps::Base::CreateExpressionMap(&expressions_file_map);
+ auto rule_map = BuildMaps::Base::CreateRuleMap(&rule_file_map, &expr_map);
+ BuildMaps::Target::ResultTargetMap result_map{0};
+ auto target_map = BuildMaps::Target::CreateTargetMap(
+ &source, &targets_file_map, &rule_map, &result_map);
+
+ AnalysedTargetPtr result;
+ bool error{false};
+ std::string error_msg;
+ auto empty_config = Configuration{Expression::FromJson(R"({})"_json)};
+
+ SECTION("outs") {
+ error = false;
+ error_msg = "NONE";
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "simple_targets", "artifact names"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(!error);
+ CHECK(error_msg == "NONE");
+ CHECK(result->Artifacts()->ToJson()["index.txt"]["type"] == "KNOWN");
+ CHECK(result->Blobs()[0] == "bar.txt;baz.txt;foo.txt");
+ }
+
+ SECTION("runfies") {
+ error = false;
+ error_msg = "NONE";
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "simple_targets", "runfile names"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(!error);
+ CHECK(error_msg == "NONE");
+ CHECK(result->Artifacts()->ToJson()["index.txt"]["type"] == "KNOWN");
+ CHECK(result->Blobs()[0] == "bar.txt;baz.txt;foo.txt");
+ }
+}
+
+TEST_CASE("built-in rules") {
+ SetupConfig();
+ auto directory_entries = BuildMaps::Base::CreateDirectoryEntriesMap();
+ auto source = BuildMaps::Base::CreateSourceTargetMap(&directory_entries);
+ auto targets_file_map = BuildMaps::Base::CreateTargetsFileMap(0);
+ auto rule_file_map = BuildMaps::Base::CreateRuleFileMap(0);
+ static auto expressions_file_map =
+ BuildMaps::Base::CreateExpressionFileMap(0);
+ auto expr_map = BuildMaps::Base::CreateExpressionMap(&expressions_file_map);
+ auto rule_map = BuildMaps::Base::CreateRuleMap(&rule_file_map, &expr_map);
+ BuildMaps::Target::ResultTargetMap result_map{0};
+ auto target_map = BuildMaps::Target::CreateTargetMap(
+ &source, &targets_file_map, &rule_map, &result_map);
+
+ AnalysedTargetPtr result;
+ bool error{false};
+ std::string error_msg;
+ auto empty_config = Configuration{Expression::FromJson(R"({})"_json)};
+
+ SECTION("generic") {
+ error = false;
+ error_msg = "NONE";
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "simple_targets", "use generic"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(!error);
+ CHECK(error_msg == "NONE");
+ CHECK(result->Artifacts()->Map().size() == 1);
+ CHECK(result->Artifacts()->ToJson()["out"]["type"] == "ACTION");
+ CHECK(result->Artifacts()->ToJson()["out"]["data"]["path"] == "out");
+ }
+
+ SECTION("install") {
+ error = false;
+ error_msg = "NONE";
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "simple_targets", "install"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(!error);
+ CHECK(error_msg == "NONE");
+ CHECK(result->Artifacts() == result->RunFiles());
+ auto stage = result->Artifacts()->ToJson();
+ CHECK(stage["foo.txt"]["type"] == "LOCAL");
+ CHECK(stage["foo.txt"]["data"]["path"] == "simple_targets/foo.txt");
+ CHECK(stage["bar.txt"]["type"] == "LOCAL");
+ CHECK(stage["bar.txt"]["data"]["path"] == "simple_targets/bar.txt");
+ CHECK(stage["combined.txt"]["type"] == "ACTION");
+ CHECK(stage["combined.txt"]["data"]["path"] == "out");
+ CHECK(stage["subdir/restaged.txt"]["type"] == "LOCAL");
+ CHECK(stage["subdir/restaged.txt"]["data"]["path"] ==
+ "simple_targets/bar.txt");
+ CHECK(stage["mix/in/this/subdir/foo.txt"]["data"]["path"] ==
+ "simple_targets/foo.txt");
+ CHECK(stage["mix/in/this/subdir/bar.txt"]["data"]["path"] ==
+ "simple_targets/bar.txt");
+ CHECK(stage["mix/in/this/subdir/baz.txt"]["data"]["path"] ==
+ "simple_targets/baz.txt");
+ CHECK(stage["mix/in/this/subdir/index.txt"]["type"] == "KNOWN");
+ }
+
+ SECTION("file_gen") {
+ error = false;
+ error_msg = "NONE";
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "simple_targets", "generate file"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(!error);
+ CHECK(error_msg == "NONE");
+ CHECK(result->Artifacts()->ToJson()["generated.txt"]["type"] ==
+ "KNOWN");
+ CHECK(result->Blobs().size() == 1);
+ CHECK(result->Blobs()[0] == "Hello World!");
+ }
+}
+
+TEST_CASE("target reference") {
+ SetupConfig();
+ auto directory_entries = BuildMaps::Base::CreateDirectoryEntriesMap();
+ auto source = BuildMaps::Base::CreateSourceTargetMap(&directory_entries);
+ auto targets_file_map = BuildMaps::Base::CreateTargetsFileMap(0);
+ auto rule_file_map = BuildMaps::Base::CreateRuleFileMap(0);
+ static auto expressions_file_map =
+ BuildMaps::Base::CreateExpressionFileMap(0);
+ auto expr_map = BuildMaps::Base::CreateExpressionMap(&expressions_file_map);
+ auto rule_map = BuildMaps::Base::CreateRuleMap(&rule_file_map, &expr_map);
+ BuildMaps::Target::ResultTargetMap result_map{0};
+ auto target_map = BuildMaps::Target::CreateTargetMap(
+ &source, &targets_file_map, &rule_map, &result_map);
+
+ AnalysedTargetPtr result;
+ bool error{false};
+ std::string error_msg;
+ auto empty_config = Configuration{Expression::FromJson(R"({})"_json)};
+
+ SECTION("file vs target") {
+ error = false;
+ error_msg = "NONE";
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "file_reference", "hello.txt"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(!error);
+ CHECK(error_msg == "NONE");
+ CHECK(result->Artifacts()->ToJson()["hello.txt"]["type"] == "ACTION");
+ CHECK(result->Artifacts()->ToJson()["hello.txt"]["data"]["path"] ==
+ "hello.txt");
+
+ CHECK(result->Actions().size() == 1);
+ CHECK(result->Actions()[0]
+ .ToJson()["input"]["raw_data/hello.txt"]["type"] == "LOCAL");
+ CHECK(result->Actions()[0]
+ .ToJson()["input"]["raw_data/hello.txt"]["data"]["path"] ==
+ "file_reference/hello.txt");
+ }
+
+ SECTION("relative address") {
+ error = false;
+ error_msg = "NONE";
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{"", "x/x/x", "addressing"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(!error);
+ CHECK(error_msg == "NONE");
+ CHECK(result->Artifacts()->ToJson()["absolute"]["data"]["path"] ==
+ "x/x/foo");
+ CHECK(result->Artifacts()->ToJson()["relative"]["data"]["path"] ==
+ "x/x/x/x/x/foo");
+ CHECK(result->Artifacts()->ToJson()["upwards"]["data"]["path"] ==
+ "x/foo");
+ }
+}
+
+TEST_CASE("trees") {
+ SetupConfig();
+ auto directory_entries = BuildMaps::Base::CreateDirectoryEntriesMap();
+ auto source = BuildMaps::Base::CreateSourceTargetMap(&directory_entries);
+ auto targets_file_map = BuildMaps::Base::CreateTargetsFileMap(0);
+ auto rule_file_map = BuildMaps::Base::CreateRuleFileMap(0);
+ static auto expressions_file_map =
+ BuildMaps::Base::CreateExpressionFileMap(0);
+ auto expr_map = BuildMaps::Base::CreateExpressionMap(&expressions_file_map);
+ auto rule_map = BuildMaps::Base::CreateRuleMap(&rule_file_map, &expr_map);
+ BuildMaps::Target::ResultTargetMap result_map{0};
+ auto target_map = BuildMaps::Target::CreateTargetMap(
+ &source, &targets_file_map, &rule_map, &result_map);
+
+ AnalysedTargetPtr result;
+ bool error{false};
+ std::string error_msg;
+ auto empty_config = Configuration{Expression::FromJson(R"({})"_json)};
+
+ SECTION("no conflict") {
+ error = false;
+ error_msg = "NONE";
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{"", "tree", "no conflict"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(!error);
+ CHECK(error_msg == "NONE");
+ CHECK(result->Actions().size() == 1);
+ CHECK(result->Actions()[0].ToJson()["input"]["tree"]["type"] == "TREE");
+ CHECK(result->Actions()[0].ToJson()["input"]["foo.txt"]["type"] ==
+ "LOCAL");
+ CHECK(
+ result->Actions()[0].ToJson()["input"]["foo.txt"]["data"]["path"] ==
+ "tree/foo.txt");
+ CHECK(result->Trees().size() == 1);
+ CHECK(result->Trees()[0].ToJson()["foo.txt"]["type"] == "LOCAL");
+ CHECK(result->Trees()[0].ToJson()["bar.txt"]["type"] == "LOCAL");
+ CHECK(result->Trees()[0].ToJson()["baz.txt"]["type"] == "LOCAL");
+ }
+
+ SECTION("stage into tree") {
+ error = false;
+ error_msg = "NONE";
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{"", "tree", "range conflict"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(error);
+ CHECK(error_msg != "NONE");
+ }
+}
+
+TEST_CASE("RESULT error reporting") {
+ SetupConfig();
+ auto directory_entries = BuildMaps::Base::CreateDirectoryEntriesMap();
+ auto source = BuildMaps::Base::CreateSourceTargetMap(&directory_entries);
+ auto targets_file_map = BuildMaps::Base::CreateTargetsFileMap(0);
+ auto rule_file_map = BuildMaps::Base::CreateRuleFileMap(0);
+ static auto expressions_file_map =
+ BuildMaps::Base::CreateExpressionFileMap(0);
+ auto expr_map = BuildMaps::Base::CreateExpressionMap(&expressions_file_map);
+ auto rule_map = BuildMaps::Base::CreateRuleMap(&rule_file_map, &expr_map);
+ BuildMaps::Target::ResultTargetMap result_map{0};
+ auto target_map = BuildMaps::Target::CreateTargetMap(
+ &source, &targets_file_map, &rule_map, &result_map);
+
+ AnalysedTargetPtr result;
+ bool error{false};
+ std::string error_msg;
+ auto empty_config = Configuration{Expression::FromJson(R"({})"_json)};
+
+ SECTION("artifacts") {
+ error = false;
+ error_msg = "NONE";
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{"", "result", "artifacts"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(error);
+ CHECK(error_msg != "NONE");
+ CHECK(error_msg.find("artifacts-not-a-map") != std::string::npos);
+ }
+
+ SECTION("artifacts entry") {
+ error = false;
+ error_msg = "NONE";
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "result", "artifacts entry"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(error);
+ CHECK(error_msg != "NONE");
+ CHECK(error_msg.find("bad-artifact-entry") != std::string::npos);
+ CHECK(error_msg.find("bad-artifact-path") != std::string::npos);
+ }
+
+ SECTION("runfiles") {
+ error = false;
+ error_msg = "NONE";
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{"", "result", "runfiles"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(error);
+ CHECK(error_msg != "NONE");
+ CHECK(error_msg.find("runfiles-not-a-map") != std::string::npos);
+ }
+
+ SECTION("runfiles entry") {
+ error = false;
+ error_msg = "NONE";
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{"", "result", "runfiles entry"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(error);
+ CHECK(error_msg != "NONE");
+ CHECK(error_msg.find("bad-runfiles-entry") != std::string::npos);
+ CHECK(error_msg.find("bad-runfiles-path") != std::string::npos);
+ }
+
+ SECTION("provides") {
+ error = false;
+ error_msg = "NONE";
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{"", "result", "provides"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(error);
+ CHECK(error_msg != "NONE");
+ CHECK(error_msg.find("provides-not-a-map") != std::string::npos);
+ }
+}
+
+TEST_CASE("wrong arguments") {
+ SetupConfig();
+ auto directory_entries = BuildMaps::Base::CreateDirectoryEntriesMap();
+ auto source = BuildMaps::Base::CreateSourceTargetMap(&directory_entries);
+ auto targets_file_map = BuildMaps::Base::CreateTargetsFileMap(0);
+ auto rule_file_map = BuildMaps::Base::CreateRuleFileMap(0);
+ static auto expressions_file_map =
+ BuildMaps::Base::CreateExpressionFileMap(0);
+ auto expr_map = BuildMaps::Base::CreateExpressionMap(&expressions_file_map);
+ auto rule_map = BuildMaps::Base::CreateRuleMap(&rule_file_map, &expr_map);
+ BuildMaps::Target::ResultTargetMap result_map{0};
+ auto target_map = BuildMaps::Target::CreateTargetMap(
+ &source, &targets_file_map, &rule_map, &result_map);
+
+ AnalysedTargetPtr result;
+ bool error{false};
+ std::string error_msg;
+ auto empty_config = Configuration{Expression::FromJson(R"({})"_json)};
+
+ SECTION("string field") {
+ error = false;
+ error_msg = "NONE";
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "bad_targets", "string field"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(error);
+ CHECK(error_msg != "NONE");
+ CHECK(error_msg.find("PlAiN sTrInG") != std::string::npos);
+ }
+
+ SECTION("string field 2") {
+ error = false;
+ error_msg = "NONE";
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "bad_targets", "string field 2"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(error);
+ CHECK(error_msg != "NONE");
+ CHECK(error_msg.find("4711") != std::string::npos);
+ }
+
+ SECTION("config field") {
+ error = false;
+ error_msg = "NONE";
+ {
+ TaskSystem ts;
+ target_map.ConsumeAfterKeysReady(
+ &ts,
+ {BuildMaps::Target::ConfiguredTarget{
+ BuildMaps::Base::EntityName{
+ "", "bad_targets", "config field"},
+ empty_config}},
+ [&result](auto values) { result = *values[0]; },
+ [&error, &error_msg](std::string const& msg, bool /*unused*/) {
+ error = true;
+ error_msg = msg;
+ });
+ }
+ CHECK(error);
+ CHECK(error_msg != "NONE");
+ CHECK(error_msg.find("FooKey") != std::string::npos);
+ CHECK(error_msg.find("BarValue") != std::string::npos);
+ }
+}
diff --git a/test/buildtool/common/TARGETS b/test/buildtool/common/TARGETS
new file mode 100644
index 00000000..d6bcdbf1
--- /dev/null
+++ b/test/buildtool/common/TARGETS
@@ -0,0 +1,41 @@
+{ "artifact_factory":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["artifact_factory"]
+ , "srcs": ["artifact_factory.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/common", "artifact_factory"]
+ ]
+ , "stage": ["test", "buildtool", "common"]
+ }
+, "artifact_description":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["artifact_description"]
+ , "srcs": ["artifact_description.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/common", "artifact_factory"]
+ , ["src/buildtool/common", "artifact_description"]
+ ]
+ , "stage": ["test", "buildtool", "common"]
+ }
+, "action_description":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["action_description"]
+ , "srcs": ["action_description.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/common", "artifact_factory"]
+ , ["src/buildtool/common", "action_description"]
+ ]
+ , "stage": ["test", "buildtool", "common"]
+ }
+, "TESTS":
+ { "type": "install"
+ , "tainted": ["test"]
+ , "deps": ["action_description", "artifact_description", "artifact_factory"]
+ }
+} \ No newline at end of file
diff --git a/test/buildtool/common/action_description.test.cpp b/test/buildtool/common/action_description.test.cpp
new file mode 100644
index 00000000..ac7367e1
--- /dev/null
+++ b/test/buildtool/common/action_description.test.cpp
@@ -0,0 +1,72 @@
+#include "catch2/catch.hpp"
+#include "src/buildtool/common/action_description.hpp"
+#include "src/buildtool/common/artifact_factory.hpp"
+
+TEST_CASE("From JSON", "[action_description]") {
+ using path = std::filesystem::path;
+ auto desc =
+ ActionDescription{{"output0", "output1"},
+ {"dir0", "dir1"},
+ Action{"id", {"command", "line"}, {{"env", "vars"}}},
+ {{"path0", ArtifactDescription{path{"input0"}}},
+ {"path1", ArtifactDescription{path{"input1"}}}}};
+ auto const& action = desc.GraphAction();
+ auto json = ArtifactFactory::DescribeAction(desc.OutputFiles(),
+ desc.OutputDirs(),
+ action.Command(),
+ desc.Inputs(),
+ action.Env());
+
+ SECTION("Parse full action") {
+ auto description = ActionDescription::FromJson("id", json);
+ REQUIRE(description);
+ CHECK(description->ToJson() == json);
+ }
+
+ SECTION("Parse action without optional input") {
+ json["input"] = nlohmann::json::object();
+ CHECK(ActionDescription::FromJson("id", json));
+
+ json["input"] = nlohmann::json::array();
+ CHECK_FALSE(ActionDescription::FromJson("id", json));
+
+ json.erase("input");
+ CHECK(ActionDescription::FromJson("id", json));
+ }
+
+ SECTION("Parse action without optional env") {
+ json["env"] = nlohmann::json::object();
+ CHECK(ActionDescription::FromJson("id", json));
+
+ json["env"] = nlohmann::json::array();
+ CHECK_FALSE(ActionDescription::FromJson("id", json));
+
+ json.erase("env");
+ CHECK(ActionDescription::FromJson("id", json));
+ }
+
+ SECTION("Parse action without mandatory outputs") {
+ json["output"] = nlohmann::json::array();
+ json["output_dirs"] = nlohmann::json::array();
+ CHECK_FALSE(ActionDescription::FromJson("id", json));
+
+ json["output"] = nlohmann::json::object();
+ json["output_dirs"] = nlohmann::json::object();
+ CHECK_FALSE(ActionDescription::FromJson("id", json));
+
+ json.erase("output");
+ json.erase("output_dirs");
+ CHECK_FALSE(ActionDescription::FromJson("id", json));
+ }
+
+ SECTION("Parse action without mandatory command") {
+ json["command"] = nlohmann::json::array();
+ CHECK_FALSE(ActionDescription::FromJson("id", json));
+
+ json["command"] = nlohmann::json::object();
+ CHECK_FALSE(ActionDescription::FromJson("id", json));
+
+ json.erase("command");
+ CHECK_FALSE(ActionDescription::FromJson("id", json));
+ }
+}
diff --git a/test/buildtool/common/artifact_description.test.cpp b/test/buildtool/common/artifact_description.test.cpp
new file mode 100644
index 00000000..b1522198
--- /dev/null
+++ b/test/buildtool/common/artifact_description.test.cpp
@@ -0,0 +1,127 @@
+#include "catch2/catch.hpp"
+#include "src/buildtool/common/artifact_description.hpp"
+#include "src/buildtool/common/artifact_factory.hpp"
+
+[[nodiscard]] auto operator==(Artifact const& lhs, Artifact const& rhs)
+ -> bool {
+ return lhs.Id() == rhs.Id() and lhs.FilePath() == rhs.FilePath() and
+ lhs.Info() == rhs.Info();
+}
+
+TEST_CASE("Local artifact", "[artifact_description]") {
+ auto local_desc =
+ ArtifactDescription{std::filesystem::path{"local_path"}, "repo"};
+ auto local = local_desc.ToArtifact();
+ auto local_from_factory =
+ ArtifactFactory::FromDescription(local_desc.ToJson());
+ CHECK(local == *local_from_factory);
+}
+
+TEST_CASE("Known artifact", "[artifact_description]") {
+ SECTION("File object") {
+ auto known_desc = ArtifactDescription{
+ ArtifactDigest{std::string{"f_fake_hash"}, 0}, ObjectType::File};
+ auto known = known_desc.ToArtifact();
+ auto known_from_factory =
+ ArtifactFactory::FromDescription(known_desc.ToJson());
+ CHECK(known == *known_from_factory);
+ }
+ SECTION("Executable object") {
+ auto known_desc =
+ ArtifactDescription{ArtifactDigest{std::string{"x_fake_hash"}, 1},
+ ObjectType::Executable};
+ auto known = known_desc.ToArtifact();
+ auto known_from_factory =
+ ArtifactFactory::FromDescription(known_desc.ToJson());
+ CHECK(known == *known_from_factory);
+ }
+}
+
+TEST_CASE("Action artifact", "[artifact_description]") {
+ auto action_desc =
+ ArtifactDescription{"action_id", std::filesystem::path{"out_path"}};
+ auto action = action_desc.ToArtifact();
+ auto action_from_factory =
+ ArtifactFactory::FromDescription(action_desc.ToJson());
+ CHECK(action == *action_from_factory);
+}
+
+TEST_CASE("From JSON", "[artifact_description]") {
+ auto local = ArtifactFactory::DescribeLocalArtifact("local", "repo");
+ auto known =
+ ArtifactFactory::DescribeKnownArtifact("hash", 0, ObjectType::File);
+ auto action = ArtifactFactory::DescribeActionArtifact("id", "output");
+
+ SECTION("Parse artifacts") {
+ CHECK(ArtifactDescription::FromJson(local));
+ CHECK(ArtifactDescription::FromJson(known));
+ CHECK(ArtifactDescription::FromJson(action));
+ }
+
+ SECTION("Parse artifact without mandatory type") {
+ local.erase("type");
+ known.erase("type");
+ action.erase("type");
+ CHECK_FALSE(ArtifactDescription::FromJson(local));
+ CHECK_FALSE(ArtifactDescription::FromJson(known));
+ CHECK_FALSE(ArtifactDescription::FromJson(action));
+ }
+
+ SECTION("Parse artifact without mandatory data") {
+ local.erase("data");
+ known.erase("data");
+ action.erase("data");
+ CHECK_FALSE(ArtifactDescription::FromJson(local));
+ CHECK_FALSE(ArtifactDescription::FromJson(known));
+ CHECK_FALSE(ArtifactDescription::FromJson(action));
+ }
+
+ SECTION("Parse local artifact without mandatory path") {
+ local["data"]["path"] = 0;
+ CHECK_FALSE(ArtifactDescription::FromJson(local));
+
+ local["data"].erase("path");
+ CHECK_FALSE(ArtifactDescription::FromJson(local));
+ }
+
+ SECTION("Parse known artifact") {
+ SECTION("without mandatory id") {
+ known["data"]["id"] = 0;
+ CHECK_FALSE(ArtifactDescription::FromJson(known));
+
+ known["data"].erase("id");
+ CHECK_FALSE(ArtifactDescription::FromJson(known));
+ }
+ SECTION("without mandatory size") {
+ known["data"]["size"] = "0";
+ CHECK_FALSE(ArtifactDescription::FromJson(known));
+
+ known["data"].erase("size");
+ CHECK_FALSE(ArtifactDescription::FromJson(known));
+ }
+ SECTION("without mandatory file_type") {
+ known["data"]["file_type"] = "more_than_one_char";
+ CHECK_FALSE(ArtifactDescription::FromJson(known));
+
+ known["data"].erase("file_type");
+ CHECK_FALSE(ArtifactDescription::FromJson(known));
+ }
+ }
+
+ SECTION("Parse action artifact") {
+ SECTION("without mandatory id") {
+ action["data"]["id"] = 0;
+ CHECK_FALSE(ArtifactDescription::FromJson(action));
+
+ action["data"].erase("id");
+ CHECK_FALSE(ArtifactDescription::FromJson(action));
+ }
+ SECTION("without mandatory path") {
+ action["data"]["path"] = 0;
+ CHECK_FALSE(ArtifactDescription::FromJson(action));
+
+ action["data"].erase("path");
+ CHECK_FALSE(ArtifactDescription::FromJson(action));
+ }
+ }
+}
diff --git a/test/buildtool/common/artifact_factory.test.cpp b/test/buildtool/common/artifact_factory.test.cpp
new file mode 100644
index 00000000..818ce410
--- /dev/null
+++ b/test/buildtool/common/artifact_factory.test.cpp
@@ -0,0 +1,54 @@
+#include "catch2/catch.hpp"
+#include "src/buildtool/common/artifact_factory.hpp"
+
+TEST_CASE("Description missing mandatory key/value pair",
+ "[artifact_factory]") {
+
+ nlohmann::json const missing_type = {{"data", {{"path", "some/path"}}}};
+ CHECK(not ArtifactFactory::FromDescription(missing_type));
+ nlohmann::json const missing_data = {{"type", "LOCAL"}};
+ CHECK(not ArtifactFactory::FromDescription(missing_data));
+}
+
+TEST_CASE("Local artifact description contains incorrect value for \"data\"",
+ "[artifact_factory]") {
+ nlohmann::json const local_art_missing_path = {
+ {"type", "LOCAL"}, {"data", nlohmann::json::object()}};
+ CHECK(not ArtifactFactory::FromDescription(local_art_missing_path));
+}
+
+TEST_CASE("Known artifact description contains incorrect value for \"data\"",
+ "[artifact_factory]") {
+ std::string file_type{};
+ file_type += ToChar(ObjectType::File);
+ SECTION("missing \"id\"") {
+ nlohmann::json const known_art_missing_id = {
+ {"type", "KNOWN"},
+ {"data", {{"size", 15}, {"file_type", file_type}}}};
+ CHECK(not ArtifactFactory::FromDescription(known_art_missing_id));
+ }
+ SECTION("missing \"size\"") {
+ nlohmann::json const known_art_missing_size = {
+ {"type", "KNOWN"},
+ {"data", {{"id", "known_input"}, {"file_type", file_type}}}};
+ CHECK(not ArtifactFactory::FromDescription(known_art_missing_size));
+ }
+ SECTION("missing \"file_type\"") {
+ nlohmann::json const known_art_missing_file_type = {
+ {"type", "KNOWN"}, {"data", {{"id", "known_input"}, {"size", 15}}}};
+
+ CHECK(
+ not ArtifactFactory::FromDescription(known_art_missing_file_type));
+ }
+}
+
+TEST_CASE("Action artifact description contains incorrect value for \"data\"",
+ "[artifact_factory]") {
+ nlohmann::json const action_art_missing_id = {
+ {"type", "ACTION"}, {"data", {{"path", "output/path"}}}};
+ CHECK(not ArtifactFactory::FromDescription(action_art_missing_id));
+
+ nlohmann::json const action_art_missing_path = {
+ {"type", "ACTION"}, {"data", {{"id", "action_id"}}}};
+ CHECK(not ArtifactFactory::FromDescription(action_art_missing_path));
+}
diff --git a/test/buildtool/crypto/TARGETS b/test/buildtool/crypto/TARGETS
new file mode 100644
index 00000000..2f604833
--- /dev/null
+++ b/test/buildtool/crypto/TARGETS
@@ -0,0 +1,13 @@
+{ "crypto":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["crypto"]
+ , "srcs": ["crypto.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/crypto", "hash_generator"]
+ ]
+ , "stage": ["test", "buildtool", "crypto"]
+ }
+, "TESTS": {"type": "install", "tainted": ["test"], "deps": ["crypto"]}
+} \ No newline at end of file
diff --git a/test/buildtool/crypto/crypto.test.cpp b/test/buildtool/crypto/crypto.test.cpp
new file mode 100644
index 00000000..78bea66b
--- /dev/null
+++ b/test/buildtool/crypto/crypto.test.cpp
@@ -0,0 +1,57 @@
+#include <algorithm>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/crypto/hash_generator.hpp"
+
+template <HashGenerator::HashType type>
+void test_single_hash(std::string const& bytes, std::string const& result) {
+ HashGenerator hash_gen{type};
+ auto digest = hash_gen.Run(bytes);
+ CHECK(digest.HexString() == result);
+}
+
+template <HashGenerator::HashType type>
+void test_increment_hash(std::string const& bytes, std::string const& result) {
+ HashGenerator hash_gen{type};
+ auto hasher = hash_gen.IncrementalHasher();
+ hasher.Update(bytes);
+ auto digest = std::move(hasher).Finalize();
+ CHECK(digest);
+ CHECK(digest->HexString() == result);
+}
+
+TEST_CASE("Hash Generator", "[crypto]") {
+ std::string bytes{"test"};
+
+ SECTION("MD5") {
+ // same as: echo -n test | md5sum
+ test_single_hash<HashGenerator::HashType::MD5>(
+ bytes, "098f6bcd4621d373cade4e832627b4f6");
+ test_increment_hash<HashGenerator::HashType::MD5>(
+ bytes, "098f6bcd4621d373cade4e832627b4f6");
+ }
+
+ SECTION("SHA-1") {
+ // same as: echo -n test | sha1sum
+ test_single_hash<HashGenerator::HashType::SHA1>(
+ bytes, "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3");
+ test_increment_hash<HashGenerator::HashType::SHA1>(
+ bytes, "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3");
+ }
+
+ SECTION("SHA-256") {
+ // same as: echo -n test | sha256sum
+ test_single_hash<HashGenerator::HashType::SHA256>(
+ bytes,
+ "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08");
+ test_increment_hash<HashGenerator::HashType::SHA256>(
+ bytes,
+ "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08");
+ }
+
+ SECTION("Git") {
+ // same as: echo -n test | git hash-object --stdin
+ test_single_hash<HashGenerator::HashType::GIT>(
+ bytes, "30d74d258442c7c65512eafab474568dd706c430");
+ }
+}
diff --git a/test/buildtool/execution_api/TARGETS b/test/buildtool/execution_api/TARGETS
new file mode 100644
index 00000000..a7e5e8cd
--- /dev/null
+++ b/test/buildtool/execution_api/TARGETS
@@ -0,0 +1,30 @@
+{ "local_tree_map":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["local_tree_map"]
+ , "srcs": ["local_tree_map.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/execution_api/local", "local"]
+ , ["src/utils/cpp", "atomic"]
+ ]
+ , "stage": ["test", "buildtool", "execution_api"]
+ }
+, "test_data":
+ { "type": ["@", "rules", "data", "staged"]
+ , "srcs":
+ [ "data/executable_file"
+ , "data/non_executable_file"
+ , "data/subdir1/file1"
+ , "data/subdir1/subdir2/file2"
+ ]
+ , "stage": ["test", "buildtool", "execution_api"]
+ }
+, "TESTS":
+ { "type": "install"
+ , "tainted": ["test"]
+ , "deps": ["local_tree_map"]
+ , "dirs":
+ [[["./", "bazel", "TESTS"], "bazel"], [["./", "local", "TESTS"], "local"]]
+ }
+} \ No newline at end of file
diff --git a/test/buildtool/execution_api/bazel/TARGETS b/test/buildtool/execution_api/bazel/TARGETS
new file mode 100644
index 00000000..74890977
--- /dev/null
+++ b/test/buildtool/execution_api/bazel/TARGETS
@@ -0,0 +1,87 @@
+{ "ac_client":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["ac_client"]
+ , "srcs": ["bazel_ac_client.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test/utils", "catch-main-remote-execution"]
+ , ["test/utils", "execution_bazel"]
+ , ["src/buildtool/execution_api/remote", "bazel_network"]
+ ]
+ , "stage": ["test", "buildtool", "execution_api", "bazel"]
+ }
+, "cas_client":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["cas_client"]
+ , "srcs": ["bazel_cas_client.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test/utils", "catch-main-remote-execution"]
+ , ["src/buildtool/execution_api/remote", "bazel_network"]
+ , ["src/buildtool/execution_api/remote", "config"]
+ ]
+ , "stage": ["test", "buildtool", "execution_api", "bazel"]
+ }
+, "execution_client":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["execution_client"]
+ , "srcs": ["bazel_execution_client.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test/utils", "catch-main-remote-execution"]
+ , ["test/utils", "execution_bazel"]
+ , ["src/buildtool/execution_api/remote", "bazel_network"]
+ ]
+ , "stage": ["test", "buildtool", "execution_api", "bazel"]
+ }
+, "bytestream_client":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["bytestream_client"]
+ , "srcs": ["bytestream_client.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test/utils", "catch-main-remote-execution"]
+ , ["test/utils", "execution_bazel"]
+ , ["src/buildtool/execution_api/remote", "bazel_network"]
+ ]
+ , "stage": ["test", "buildtool", "execution_api", "bazel"]
+ }
+, "network":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["network"]
+ , "srcs": ["bazel_network.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test/utils", "catch-main-remote-execution"]
+ , ["test/utils", "execution_bazel"]
+ , ["src/buildtool/execution_api/remote", "bazel_network"]
+ ]
+ , "stage": ["test", "buildtool", "execution_api", "bazel"]
+ }
+, "msg_factory":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["msg_factory"]
+ , "srcs": ["bazel_msg_factory.test.cpp"]
+ , "data": [["test/buildtool/execution_api", "test_data"]]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/common", "artifact_factory"]
+ , ["src/buildtool/execution_api/bazel_msg", "bazel_msg_factory"]
+ , ["src/buildtool/file_system", "object_type"]
+ ]
+ , "stage": ["test", "buildtool", "execution_api", "bazel"]
+ }
+, "TESTS":
+ { "type": "install"
+ , "tainted": ["test"]
+ , "deps":
+ [ "ac_client"
+ , "bytestream_client"
+ , "cas_client"
+ , "execution_client"
+ , "msg_factory"
+ , "network"
+ ]
+ }
+} \ No newline at end of file
diff --git a/test/buildtool/execution_api/bazel/bazel_ac_client.test.cpp b/test/buildtool/execution_api/bazel/bazel_ac_client.test.cpp
new file mode 100644
index 00000000..4a352a8e
--- /dev/null
+++ b/test/buildtool/execution_api/bazel/bazel_ac_client.test.cpp
@@ -0,0 +1,50 @@
+#include <string>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_ac_client.hpp"
+#include "src/buildtool/execution_api/remote/config.hpp"
+#include "test/utils/remote_execution/bazel_action_creator.hpp"
+#include "test/utils/test_env.hpp"
+
+auto CreateActionCacheEntry(BazelAcClient* ac_client,
+ std::string const& instance_name,
+ bazel_re::Digest const& action_id,
+ std::string const& output) {
+ bazel_re::ActionResult result{};
+ result.set_stdout_raw(output);
+ REQUIRE(ac_client->UpdateActionResult(instance_name, action_id, result, 1));
+}
+
+// IMPORTANT: we are hiding this test case because the version of buildbarn we
+// are currently using does not allow us to upload the action to the AC
+// directly. The test was not failing due to a similar action being updated by
+// another test (and lack of hermeticity), so it is better to disable it than to
+// have it fail if we change that other test or reset the buildbarn server and
+// run only the current test case. See issue#30 in
+// https://rnd-gitlab-eu-c.huawei.com/germany-research-center/intelligent-cloud-technologies-laboratory/9424510-devcloud-build-tool-technology-project-de/-/issues/30
+TEST_CASE("Bazel internals: AC Client", "[!hide][execution_api]") {
+ auto const& info = RemoteExecutionConfig::Instance();
+
+ BazelAcClient ac_client(info.Host(), info.Port());
+
+ std::string instance_name{"remote-execution"};
+ std::string content("test");
+ auto test_digest = ArtifactDigest::Create(content);
+
+ auto action_id = CreateAction(instance_name,
+ {"echo", "-n", content},
+ {},
+ ReadPlatformPropertiesFromEnv());
+ REQUIRE(action_id);
+
+ // TODO(investigate): Upload fails due to permission issues. The BuildBarn
+ // revision we are currently using seems to ignore the
+ // 'allowAcUpdatesForInstances' setting.
+ CreateActionCacheEntry(&ac_client, instance_name, *action_id, content);
+
+ auto ac_result =
+ ac_client.GetActionResult(instance_name, *action_id, true, true, {});
+ REQUIRE(ac_result);
+ CHECK(std::equal_to<bazel_re::Digest>{}(ac_result->stdout_digest(),
+ test_digest));
+}
diff --git a/test/buildtool/execution_api/bazel/bazel_cas_client.test.cpp b/test/buildtool/execution_api/bazel/bazel_cas_client.test.cpp
new file mode 100644
index 00000000..30866ffe
--- /dev/null
+++ b/test/buildtool/execution_api/bazel/bazel_cas_client.test.cpp
@@ -0,0 +1,73 @@
+#include <string>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_cas_client.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp"
+#include "src/buildtool/execution_api/remote/config.hpp"
+
+TEST_CASE("Bazel internals: CAS Client", "[execution_api]") {
+ auto const& info = RemoteExecutionConfig::Instance();
+
+ std::string instance_name{"remote-execution"};
+ std::string content("test");
+
+ // Create CAS client
+ BazelCasClient cas_client(info.Host(), info.Port());
+
+ SECTION("Valid digest and blob") {
+ // digest of "test"
+ auto digest = ArtifactDigest::Create(content);
+
+ // Valid blob
+ BazelBlob blob{digest, content};
+
+ // Search blob via digest
+ auto digests = cas_client.FindMissingBlobs(instance_name, {digest});
+ CHECK(digests.size() <= 1);
+
+ if (!digests.empty()) {
+ // Upload blob, if not found
+ std::vector<BazelBlob> to_upload{blob};
+ CHECK(cas_client
+ .BatchUpdateBlobs(
+ instance_name, to_upload.begin(), to_upload.end())
+ .size() == 1);
+ }
+
+ // Read blob
+ std::vector<bazel_re::Digest> to_read{digest};
+ auto blobs = cas_client.BatchReadBlobs(
+ instance_name, to_read.begin(), to_read.end());
+ REQUIRE(blobs.size() == 1);
+ CHECK(std::equal_to<bazel_re::Digest>{}(blobs[0].digest, digest));
+ CHECK(blobs[0].data == content);
+ }
+
+ SECTION("Invalid digest and blob") {
+ // Faulty digest
+ bazel_re::Digest faulty_digest{};
+ faulty_digest.set_hash(
+ "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef");
+ faulty_digest.set_size_bytes(4);
+
+ // Faulty blob
+ BazelBlob faulty_blob{faulty_digest, content};
+
+ // Search faulty digest
+ CHECK(cas_client.FindMissingBlobs(instance_name, {faulty_digest})
+ .size() == 1);
+
+ // Try upload faulty blob
+ std::vector<BazelBlob> to_upload{faulty_blob};
+ CHECK(cas_client
+ .BatchUpdateBlobs(
+ instance_name, to_upload.begin(), to_upload.end())
+ .empty());
+
+ // Read blob via faulty digest
+ std::vector<bazel_re::Digest> to_read{faulty_digest};
+ CHECK(cas_client
+ .BatchReadBlobs(instance_name, to_read.begin(), to_read.end())
+ .empty());
+ }
+}
diff --git a/test/buildtool/execution_api/bazel/bazel_execution_client.test.cpp b/test/buildtool/execution_api/bazel/bazel_execution_client.test.cpp
new file mode 100755
index 00000000..a80eb1af
--- /dev/null
+++ b/test/buildtool/execution_api/bazel/bazel_execution_client.test.cpp
@@ -0,0 +1,102 @@
+#include <string>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp"
+#include "src/buildtool/execution_api/remote/config.hpp"
+#include "test/utils/remote_execution/bazel_action_creator.hpp"
+#include "test/utils/test_env.hpp"
+
+TEST_CASE("Bazel internals: Execution Client", "[execution_api]") {
+ auto const& info = RemoteExecutionConfig::Instance();
+
+ std::string instance_name{"remote-execution"};
+ std::string content("test");
+ auto test_digest = ArtifactDigest::Create(content);
+
+ BazelExecutionClient execution_client(info.Host(), info.Port());
+
+ ExecutionConfiguration config;
+ config.skip_cache_lookup = false;
+
+ SECTION("Immediate execution and response") {
+ auto action_immediate = CreateAction(instance_name,
+ {"echo", "-n", content},
+ {},
+ ReadPlatformPropertiesFromEnv());
+ REQUIRE(action_immediate);
+
+ auto response = execution_client.Execute(
+ instance_name, *action_immediate, config, true);
+
+ REQUIRE(response.state ==
+ BazelExecutionClient::ExecutionResponse::State::Finished);
+ REQUIRE(response.output);
+
+ CHECK(response.output->action_result.stdout_digest().hash() ==
+ test_digest.hash());
+ }
+
+ SECTION("Delayed execution") {
+ auto action_delayed =
+ CreateAction(instance_name,
+ {"sh", "-c", "sleep 1s; echo -n test"},
+ {},
+ ReadPlatformPropertiesFromEnv());
+
+ SECTION("Blocking, immediately obtain result") {
+ auto response = execution_client.Execute(
+ instance_name, *action_delayed, config, true);
+
+ REQUIRE(response.state ==
+ BazelExecutionClient::ExecutionResponse::State::Finished);
+ REQUIRE(response.output);
+
+ CHECK(response.output->action_result.stdout_digest().hash() ==
+ test_digest.hash());
+ }
+
+ SECTION("Non-blocking, obtain result later") {
+ auto response = execution_client.Execute(
+ instance_name, *action_delayed, config, false);
+
+ REQUIRE(response.state ==
+ BazelExecutionClient::ExecutionResponse::State::Ongoing);
+ response =
+ execution_client.WaitExecution(response.execution_handle);
+ REQUIRE(response.output);
+
+ CHECK(response.output->action_result.stdout_digest().hash() ==
+ test_digest.hash());
+ }
+ }
+}
+
+TEST_CASE("Bazel internals: Execution Client using env variables",
+ "[execution_api]") {
+ auto const& info = RemoteExecutionConfig::Instance();
+
+ std::string instance_name{"remote-execution"};
+ std::string content("contents of env variable");
+ auto test_digest = ArtifactDigest::Create(content);
+
+ BazelExecutionClient execution_client(info.Host(), info.Port());
+
+ ExecutionConfiguration config;
+ config.skip_cache_lookup = false;
+ auto action =
+ CreateAction(instance_name,
+ {"/bin/sh", "-c", "set -e\necho -n ${MYTESTVAR}"},
+ {{"MYTESTVAR", content}},
+ ReadPlatformPropertiesFromEnv());
+ REQUIRE(action);
+
+ auto response =
+ execution_client.Execute(instance_name, *action, config, true);
+
+ REQUIRE(response.state ==
+ BazelExecutionClient::ExecutionResponse::State::Finished);
+ REQUIRE(response.output);
+
+ CHECK(response.output->action_result.stdout_digest().hash() ==
+ test_digest.hash());
+}
diff --git a/test/buildtool/execution_api/bazel/bazel_msg_factory.test.cpp b/test/buildtool/execution_api/bazel/bazel_msg_factory.test.cpp
new file mode 100644
index 00000000..050af817
--- /dev/null
+++ b/test/buildtool/execution_api/bazel/bazel_msg_factory.test.cpp
@@ -0,0 +1,53 @@
+#include <filesystem>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/common/artifact_factory.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_blob.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_blob_container.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_msg_factory.hpp"
+#include "src/buildtool/file_system/object_type.hpp"
+
+TEST_CASE("Bazel internals: MessageFactory", "[execution_api]") {
+ std::filesystem::path workspace{"test/buildtool/execution_api/data"};
+
+ std::filesystem::path subdir1 = workspace / "subdir1";
+ std::filesystem::path subdir2 = subdir1 / "subdir2";
+ std::filesystem::path file1 = subdir1 / "file1";
+ std::filesystem::path file2 = subdir2 / "file2";
+
+ auto file1_blob = CreateBlobFromFile(file1);
+ auto file2_blob = CreateBlobFromFile(file2);
+
+ CHECK(file1_blob);
+ CHECK(file2_blob);
+
+ // both files are the same and should result in identical blobs
+ CHECK(file1_blob->data == file2_blob->data);
+ CHECK(file1_blob->digest.hash() == file2_blob->digest.hash());
+ CHECK(file1_blob->digest.size_bytes() == file2_blob->digest.size_bytes());
+
+ // create known artifacts
+ auto artifact1_opt =
+ ArtifactFactory::FromDescription(ArtifactFactory::DescribeKnownArtifact(
+ file1_blob->digest.hash(),
+ static_cast<std::size_t>(file1_blob->digest.size_bytes()),
+ ObjectType::File));
+ CHECK(artifact1_opt.has_value());
+ auto artifact1 = DependencyGraph::ArtifactNode{std::move(*artifact1_opt)};
+
+ auto artifact2_opt =
+ ArtifactFactory::FromDescription(ArtifactFactory::DescribeKnownArtifact(
+ file2_blob->digest.hash(),
+ static_cast<std::size_t>(file2_blob->digest.size_bytes()),
+ ObjectType::File));
+ CHECK(artifact2_opt.has_value());
+ auto artifact2 = DependencyGraph::ArtifactNode{std::move(*artifact2_opt)};
+
+ // create blobs via tree
+ BlobContainer blobs{};
+ REQUIRE(BazelMsgFactory::CreateDirectoryDigestFromTree(
+ {{file1.string(), &artifact1}, {file2.string(), &artifact2}},
+ [&blobs](BazelBlob&& blob) { blobs.Emplace(std::move(blob)); }));
+
+ // TODO(aehlig): also check total number of DirectoryNode blobs in container
+}
diff --git a/test/buildtool/execution_api/bazel/bazel_network.test.cpp b/test/buildtool/execution_api/bazel/bazel_network.test.cpp
new file mode 100644
index 00000000..6a668406
--- /dev/null
+++ b/test/buildtool/execution_api/bazel/bazel_network.test.cpp
@@ -0,0 +1,45 @@
+#include <string>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_execution_client.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_network.hpp"
+#include "src/buildtool/execution_api/remote/config.hpp"
+
+constexpr std::size_t kLargeSize = GRPC_DEFAULT_MAX_RECV_MESSAGE_LENGTH + 1;
+
+TEST_CASE("Bazel network: write/read blobs", "[execution_api]") {
+ auto const& info = RemoteExecutionConfig::Instance();
+ std::string instance_name{"remote-execution"};
+ auto network = BazelNetwork{instance_name, info.Host(), info.Port(), {}};
+
+ std::string content_foo("foo");
+ std::string content_bar("bar");
+ std::string content_baz(kLargeSize, 'x'); // single larger blob
+
+ BazelBlob foo{ArtifactDigest::Create(content_foo), content_foo};
+ BazelBlob bar{ArtifactDigest::Create(content_bar), content_bar};
+ BazelBlob baz{ArtifactDigest::Create(content_baz), content_baz};
+
+ // Search blobs via digest
+ REQUIRE(network.UploadBlobs(BlobContainer{{foo, bar, baz}}));
+
+ // Read blobs in order
+ auto reader = network.ReadBlobs(
+ {foo.digest, bar.digest, baz.digest, bar.digest, foo.digest});
+ std::vector<BazelBlob> blobs{};
+ while (true) {
+ auto next = reader.Next();
+ if (next.empty()) {
+ break;
+ }
+ blobs.insert(blobs.end(), next.begin(), next.end());
+ }
+
+ // Check order maintained
+ REQUIRE(blobs.size() == 5);
+ CHECK(blobs[0].data == content_foo);
+ CHECK(blobs[1].data == content_bar);
+ CHECK(blobs[2].data == content_baz);
+ CHECK(blobs[3].data == content_bar);
+ CHECK(blobs[4].data == content_foo);
+}
diff --git a/test/buildtool/execution_api/bazel/bytestream_client.test.cpp b/test/buildtool/execution_api/bazel/bytestream_client.test.cpp
new file mode 100644
index 00000000..fa09862c
--- /dev/null
+++ b/test/buildtool/execution_api/bazel/bytestream_client.test.cpp
@@ -0,0 +1,169 @@
+#include <string>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/execution_api/bazel_msg/bazel_blob.hpp"
+#include "src/buildtool/execution_api/common/execution_common.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bytestream_client.hpp"
+#include "src/buildtool/execution_api/remote/config.hpp"
+
+constexpr std::size_t kLargeSize = GRPC_DEFAULT_MAX_RECV_MESSAGE_LENGTH + 1;
+
+TEST_CASE("ByteStream Client: Transfer single blob", "[execution_api]") {
+ auto const& info = RemoteExecutionConfig::Instance();
+ auto stream = ByteStreamClient{info.Host(), info.Port()};
+ auto uuid = CreateUUIDVersion4(*CreateProcessUniqueId());
+
+ SECTION("Upload small blob") {
+ std::string instance_name{"remote-execution"};
+ std::string content("foobar");
+
+ // digest of "foobar"
+ auto digest = ArtifactDigest::Create(content);
+
+ CHECK(stream.Write(fmt::format("{}/uploads/{}/blobs/{}/{}",
+ instance_name,
+ uuid,
+ digest.hash(),
+ digest.size()),
+ content));
+
+ SECTION("Download small blob") {
+ auto data = stream.Read(fmt::format(
+ "{}/blobs/{}/{}", instance_name, digest.hash(), digest.size()));
+
+ CHECK(data == content);
+ }
+ }
+
+ SECTION("Upload large blob") {
+ std::string instance_name{"remote-execution"};
+
+ std::string content(kLargeSize, '\0');
+ for (std::size_t i{}; i < content.size(); ++i) {
+ content[i] = instance_name[i % instance_name.size()];
+ }
+
+ // digest of "instance_nameinstance_nameinstance_..."
+ auto digest = ArtifactDigest::Create(content);
+
+ CHECK(stream.Write(fmt::format("{}/uploads/{}/blobs/{}/{}",
+ instance_name,
+ uuid,
+ digest.hash(),
+ digest.size()),
+ content));
+
+ SECTION("Download large blob") {
+ auto data = stream.Read(fmt::format(
+ "{}/blobs/{}/{}", instance_name, digest.hash(), digest.size()));
+
+ CHECK(data == content);
+ }
+
+ SECTION("Incrementally download large blob") {
+ auto reader = stream.IncrementalRead(fmt::format(
+ "{}/blobs/{}/{}", instance_name, digest.hash(), digest.size()));
+
+ std::string data{};
+ auto chunk = reader.Next();
+ while (chunk and not chunk->empty()) {
+ data.append(chunk->begin(), chunk->end());
+ chunk = reader.Next();
+ }
+
+ CHECK(chunk);
+ CHECK(data == content);
+ }
+ }
+}
+
+TEST_CASE("ByteStream Client: Transfer multiple blobs", "[execution_api]") {
+ auto const& info = RemoteExecutionConfig::Instance();
+ auto stream = ByteStreamClient{info.Host(), info.Port()};
+ auto uuid = CreateUUIDVersion4(*CreateProcessUniqueId());
+
+ SECTION("Upload small blobs") {
+ std::string instance_name{"remote-execution"};
+
+ BazelBlob foo{ArtifactDigest::Create("foo"), "foo"};
+ BazelBlob bar{ArtifactDigest::Create("bar"), "bar"};
+ BazelBlob baz{ArtifactDigest::Create("baz"), "baz"};
+
+ CHECK(stream.WriteMany<BazelBlob>(
+ {foo, bar, baz},
+ [&instance_name, &uuid](auto const& blob) {
+ return fmt::format("{}/uploads/{}/blobs/{}/{}",
+ instance_name,
+ uuid,
+ blob.digest.hash(),
+ blob.digest.size_bytes());
+ },
+ [](auto const& blob) { return blob.data; }));
+
+ SECTION("Download small blobs") {
+ std::vector<std::string> contents{};
+ stream.ReadMany<bazel_re::Digest>(
+ {foo.digest, bar.digest, baz.digest},
+ [&instance_name](auto const& digest) -> std::string {
+ return fmt::format("{}/blobs/{}/{}",
+ instance_name,
+ digest.hash(),
+ digest.size_bytes());
+ },
+ [&contents](auto data) {
+ contents.emplace_back(std::move(data));
+ });
+ REQUIRE(contents.size() == 3);
+ CHECK(contents[0] == foo.data);
+ CHECK(contents[1] == bar.data);
+ CHECK(contents[2] == baz.data);
+ }
+ }
+
+ SECTION("Upload large blobs") {
+ std::string instance_name{"remote-execution"};
+
+ std::string content_foo(kLargeSize, '\0');
+ std::string content_bar(kLargeSize, '\0');
+ std::string content_baz(kLargeSize, '\0');
+ for (std::size_t i{}; i < content_foo.size(); ++i) {
+ content_foo[i] = instance_name[(i + 0) % instance_name.size()];
+ content_bar[i] = instance_name[(i + 1) % instance_name.size()];
+ content_baz[i] = instance_name[(i + 2) % instance_name.size()];
+ }
+
+ BazelBlob foo{ArtifactDigest::Create(content_foo), content_foo};
+ BazelBlob bar{ArtifactDigest::Create(content_bar), content_bar};
+ BazelBlob baz{ArtifactDigest::Create(content_baz), content_baz};
+
+ CHECK(stream.WriteMany<BazelBlob>(
+ {foo, bar, baz},
+ [&instance_name, &uuid](auto const& blob) {
+ return fmt::format("{}/uploads/{}/blobs/{}/{}",
+ instance_name,
+ uuid,
+ blob.digest.hash(),
+ blob.digest.size_bytes());
+ },
+ [](auto const& blob) { return blob.data; }));
+
+ SECTION("Download large blobs") {
+ std::vector<std::string> contents{};
+ stream.ReadMany<bazel_re::Digest>(
+ {foo.digest, bar.digest, baz.digest},
+ [&instance_name](auto const& digest) -> std::string {
+ return fmt::format("{}/blobs/{}/{}",
+ instance_name,
+ digest.hash(),
+ digest.size_bytes());
+ },
+ [&contents](auto data) {
+ contents.emplace_back(std::move(data));
+ });
+ REQUIRE(contents.size() == 3);
+ CHECK(contents[0] == foo.data);
+ CHECK(contents[1] == bar.data);
+ CHECK(contents[2] == baz.data);
+ }
+ }
+}
diff --git a/test/buildtool/execution_api/data/executable_file b/test/buildtool/execution_api/data/executable_file
new file mode 100755
index 00000000..30d74d25
--- /dev/null
+++ b/test/buildtool/execution_api/data/executable_file
@@ -0,0 +1 @@
+test \ No newline at end of file
diff --git a/test/buildtool/execution_api/data/non_executable_file b/test/buildtool/execution_api/data/non_executable_file
new file mode 100755
index 00000000..30d74d25
--- /dev/null
+++ b/test/buildtool/execution_api/data/non_executable_file
@@ -0,0 +1 @@
+test \ No newline at end of file
diff --git a/test/buildtool/execution_api/data/subdir1/file1 b/test/buildtool/execution_api/data/subdir1/file1
new file mode 100644
index 00000000..30d74d25
--- /dev/null
+++ b/test/buildtool/execution_api/data/subdir1/file1
@@ -0,0 +1 @@
+test \ No newline at end of file
diff --git a/test/buildtool/execution_api/data/subdir1/subdir2/file2 b/test/buildtool/execution_api/data/subdir1/subdir2/file2
new file mode 100644
index 00000000..30d74d25
--- /dev/null
+++ b/test/buildtool/execution_api/data/subdir1/subdir2/file2
@@ -0,0 +1 @@
+test \ No newline at end of file
diff --git a/test/buildtool/execution_api/local/TARGETS b/test/buildtool/execution_api/local/TARGETS
new file mode 100644
index 00000000..1a86621f
--- /dev/null
+++ b/test/buildtool/execution_api/local/TARGETS
@@ -0,0 +1,73 @@
+{ "local_cas":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["local_cas"]
+ , "srcs": ["local_cas.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/crypto", "hash_generator"]
+ , ["src/buildtool/execution_api/local", "local"]
+ , ["src/buildtool/file_system", "file_system_manager"]
+ , ["test/utils", "local_hermeticity"]
+ ]
+ , "stage": ["test", "buildtool", "execution_api", "local"]
+ }
+, "local_ac":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["local_ac"]
+ , "srcs": ["local_ac.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/execution_api/local", "local"]
+ , ["src/buildtool/file_system", "file_system_manager"]
+ , ["test/utils", "local_hermeticity"]
+ ]
+ , "stage": ["test", "buildtool", "execution_api", "local"]
+ }
+, "local_execution":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["local_execution"]
+ , "srcs": ["local_execution.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/common", "artifact_factory"]
+ , ["src/buildtool/execution_api/local", "local"]
+ , ["test/utils", "local_hermeticity"]
+ ]
+ , "stage": ["test", "buildtool", "execution_api", "local"]
+ }
+, "local_api":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["local_api"]
+ , "srcs": ["local_api.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/common", "artifact_factory"]
+ , ["src/buildtool/execution_api/local", "local"]
+ , ["test/utils", "local_hermeticity"]
+ ]
+ , "stage": ["test", "buildtool", "execution_api", "local"]
+ }
+, "local_storage":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["local_storage"]
+ , "srcs": ["local_storage.test.cpp"]
+ , "data": [["test/buildtool/execution_api", "test_data"]]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/execution_api/local", "local"]
+ , ["test/utils", "local_hermeticity"]
+ ]
+ , "stage": ["test", "buildtool", "execution_api", "local"]
+ }
+, "TESTS":
+ { "type": "install"
+ , "tainted": ["test"]
+ , "deps":
+ ["local_ac", "local_api", "local_cas", "local_execution", "local_storage"]
+ }
+} \ No newline at end of file
diff --git a/test/buildtool/execution_api/local/local_ac.test.cpp b/test/buildtool/execution_api/local/local_ac.test.cpp
new file mode 100644
index 00000000..6bbe0d64
--- /dev/null
+++ b/test/buildtool/execution_api/local/local_ac.test.cpp
@@ -0,0 +1,121 @@
+#include <string>
+
+#include "catch2/catch.hpp"
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/execution_api/local/local_ac.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "test/utils/hermeticity/local.hpp"
+
+[[nodiscard]] static auto RunDummyExecution(gsl::not_null<LocalAC*> const& ac,
+ bazel_re::Digest const& action_id,
+ std::string const& seed) -> bool;
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "LocalAC: Single action, single result",
+ "[execution_api]") {
+ LocalCAS cas{};
+ LocalAC ac{&cas};
+
+ auto action_id = ArtifactDigest::Create("action");
+ CHECK(not ac.CachedResult(action_id));
+
+ CHECK(RunDummyExecution(&ac, action_id, "result"));
+ auto ac_result = ac.CachedResult(action_id);
+ CHECK(ac_result);
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "LocalAC: Two different actions, two different results",
+ "[execution_api]") {
+ LocalCAS cas{};
+ LocalAC ac{&cas};
+
+ auto action_id1 = ArtifactDigest::Create("action1");
+ auto action_id2 = ArtifactDigest::Create("action2");
+ CHECK(not ac.CachedResult(action_id1));
+ CHECK(not ac.CachedResult(action_id2));
+
+ std::string result_content1{};
+ std::string result_content2{};
+
+ CHECK(RunDummyExecution(&ac, action_id1, "result1"));
+ auto ac_result1 = ac.CachedResult(action_id1);
+ REQUIRE(ac_result1);
+ CHECK(ac_result1->SerializeToString(&result_content1));
+
+ CHECK(RunDummyExecution(&ac, action_id2, "result2"));
+ auto ac_result2 = ac.CachedResult(action_id2);
+ REQUIRE(ac_result2);
+ CHECK(ac_result2->SerializeToString(&result_content2));
+
+ // check different actions, different result
+ CHECK(action_id1.hash() != action_id2.hash());
+ CHECK(result_content1 != result_content2);
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "LocalAC: Two different actions, same two results",
+ "[execution_api]") {
+ LocalCAS cas{};
+ LocalAC ac{&cas};
+
+ auto action_id1 = ArtifactDigest::Create("action1");
+ auto action_id2 = ArtifactDigest::Create("action2");
+ CHECK(not ac.CachedResult(action_id1));
+ CHECK(not ac.CachedResult(action_id2));
+
+ std::string result_content1{};
+ std::string result_content2{};
+
+ CHECK(RunDummyExecution(&ac, action_id1, "same result"));
+ auto ac_result1 = ac.CachedResult(action_id1);
+ REQUIRE(ac_result1);
+ CHECK(ac_result1->SerializeToString(&result_content1));
+
+ CHECK(RunDummyExecution(&ac, action_id2, "same result"));
+ auto ac_result2 = ac.CachedResult(action_id2);
+ REQUIRE(ac_result2);
+ CHECK(ac_result2->SerializeToString(&result_content2));
+
+ // check different actions, but same result
+ CHECK(action_id1.hash() != action_id2.hash());
+ CHECK(result_content1 == result_content2);
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "LocalAC: Same two actions, two differnet results",
+ "[execution_api]") {
+ LocalCAS cas{};
+ LocalAC ac{&cas};
+
+ auto action_id = ArtifactDigest::Create("same action");
+ CHECK(not ac.CachedResult(action_id));
+
+ std::string result_content1{};
+ std::string result_content2{};
+
+ CHECK(RunDummyExecution(&ac, action_id, "result1"));
+ auto ac_result1 = ac.CachedResult(action_id);
+ REQUIRE(ac_result1);
+ CHECK(ac_result1->SerializeToString(&result_content1));
+
+ CHECK(RunDummyExecution(&ac, action_id, "result2")); // updated
+ auto ac_result2 = ac.CachedResult(action_id);
+ REQUIRE(ac_result2);
+ CHECK(ac_result2->SerializeToString(&result_content2));
+
+ // check same actions, different cached result
+ CHECK(result_content1 != result_content2);
+}
+
+auto RunDummyExecution(gsl::not_null<LocalAC*> const& ac,
+ bazel_re::Digest const& action_id,
+ std::string const& seed) -> bool {
+ bazel_re::ActionResult result{};
+ *result.add_output_files() = [&]() {
+ bazel_re::OutputFile out{};
+ out.set_path(seed);
+ return out;
+ }();
+ return ac->StoreResult(action_id, result);
+}
diff --git a/test/buildtool/execution_api/local/local_api.test.cpp b/test/buildtool/execution_api/local/local_api.test.cpp
new file mode 100644
index 00000000..39eaa6f0
--- /dev/null
+++ b/test/buildtool/execution_api/local/local_api.test.cpp
@@ -0,0 +1,299 @@
+#include <string>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/common/artifact_factory.hpp"
+#include "src/buildtool/execution_api/common/execution_action.hpp"
+#include "src/buildtool/execution_api/common/execution_api.hpp"
+#include "src/buildtool/execution_api/common/execution_response.hpp"
+#include "src/buildtool/execution_api/local/local_api.hpp"
+#include "test/utils/hermeticity/local.hpp"
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "LocalAPI: No input, no output",
+ "[execution_api]") {
+ std::string test_content("test");
+
+ auto api = LocalApi();
+
+ auto action = api.CreateAction(
+ *api.UploadTree({}), {"echo", "-n", test_content}, {}, {}, {}, {});
+
+ SECTION("Cache execution result in action cache") {
+ action->SetCacheFlag(IExecutionAction::CacheFlag::CacheOutput);
+
+ // run execution
+ auto response = action->Execute();
+ REQUIRE(response);
+
+ // verify result
+ CHECK(response->HasStdOut());
+ CHECK(response->StdOut() == test_content);
+ CHECK(not response->IsCached());
+
+ SECTION("Rerun execution to verify caching") {
+ // run execution
+ auto response = action->Execute();
+ REQUIRE(response);
+
+ // verify result
+ CHECK(response->HasStdOut());
+ CHECK(response->StdOut() == test_content);
+ CHECK(response->IsCached());
+ }
+ }
+
+ SECTION("Do not cache execution result in action cache") {
+ action->SetCacheFlag(IExecutionAction::CacheFlag::DoNotCacheOutput);
+
+ // run execution
+ auto response = action->Execute();
+ REQUIRE(response);
+
+ // verify result
+ CHECK(response->HasStdOut());
+ CHECK(response->StdOut() == test_content);
+ CHECK(not response->IsCached());
+
+ SECTION("Rerun execution to verify caching") {
+ // run execution
+ auto response = action->Execute();
+ REQUIRE(response);
+
+ // verify result
+ CHECK(response->HasStdOut());
+ CHECK(response->StdOut() == test_content);
+ CHECK(not response->IsCached());
+ }
+ }
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "LocalAPI: No input, create output",
+ "[execution_api]") {
+ std::string test_content("test");
+ auto test_digest = ArtifactDigest::Create(test_content);
+
+ std::string output_path{"output_file"};
+
+ auto api = LocalApi();
+
+ auto action = api.CreateAction(
+ *api.UploadTree({}),
+ {"/bin/sh",
+ "-c",
+ "set -e\necho -n " + test_content + " > " + output_path},
+ {output_path},
+ {},
+ {},
+ {});
+
+ SECTION("Cache execution result in action cache") {
+ action->SetCacheFlag(IExecutionAction::CacheFlag::CacheOutput);
+
+ // run execution
+ auto response = action->Execute();
+ REQUIRE(response);
+
+ // verify result
+ auto artifacts = response->Artifacts();
+ REQUIRE(artifacts.contains(output_path));
+ CHECK(artifacts.at(output_path).digest == test_digest);
+ CHECK(not response->IsCached());
+
+ SECTION("Rerun execution to verify caching") {
+ // run execution
+ auto response = action->Execute();
+ REQUIRE(response);
+
+ // verify result
+ auto artifacts = response->Artifacts();
+ REQUIRE(artifacts.contains(output_path));
+ CHECK(artifacts.at(output_path).digest == test_digest);
+ CHECK(response->IsCached());
+ }
+ }
+
+ SECTION("Do not cache execution result in action cache") {
+ action->SetCacheFlag(IExecutionAction::CacheFlag::DoNotCacheOutput);
+
+ // run execution
+ auto response = action->Execute();
+ REQUIRE(response);
+
+ // verify result
+ auto artifacts = response->Artifacts();
+ REQUIRE(artifacts.contains(output_path));
+ CHECK(artifacts.at(output_path).digest == test_digest);
+ CHECK(not response->IsCached());
+
+ SECTION("Rerun execution to verify caching") {
+ // run execution
+ auto response = action->Execute();
+ REQUIRE(response);
+
+ // verify result
+ auto artifacts = response->Artifacts();
+ REQUIRE(artifacts.contains(output_path));
+ CHECK(artifacts.at(output_path).digest == test_digest);
+ CHECK(not response->IsCached());
+ }
+ }
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "LocalAPI: One input copied to output",
+ "[execution_api]") {
+ std::string test_content("test");
+ auto test_digest = ArtifactDigest::Create(test_content);
+
+ auto input_artifact_opt =
+ ArtifactFactory::FromDescription(ArtifactFactory::DescribeKnownArtifact(
+ test_digest.hash(), test_digest.size(), ObjectType::File));
+ CHECK(input_artifact_opt.has_value());
+ auto input_artifact =
+ DependencyGraph::ArtifactNode{std::move(*input_artifact_opt)};
+
+ std::string input_path{"dir/subdir/input"};
+ std::string output_path{"output_file"};
+
+ auto api = LocalApi();
+ CHECK(api.Upload(BlobContainer{{BazelBlob{test_digest, test_content}}},
+ false));
+
+ auto action =
+ api.CreateAction(*api.UploadTree({{input_path, &input_artifact}}),
+ {"cp", input_path, output_path},
+ {output_path},
+ {},
+ {},
+ {});
+
+ SECTION("Cache execution result in action cache") {
+ action->SetCacheFlag(IExecutionAction::CacheFlag::CacheOutput);
+
+ // run execution
+ auto response = action->Execute();
+ REQUIRE(response);
+
+ // verify result
+ auto artifacts = response->Artifacts();
+ REQUIRE(artifacts.contains(output_path));
+ CHECK(artifacts.at(output_path).digest == test_digest);
+ CHECK(not response->IsCached());
+
+ SECTION("Rerun execution to verify caching") {
+ // run execution
+ auto response = action->Execute();
+ REQUIRE(response);
+
+ // verify result
+ auto artifacts = response->Artifacts();
+ REQUIRE(artifacts.contains(output_path));
+ CHECK(artifacts.at(output_path).digest == test_digest);
+ CHECK(response->IsCached());
+ }
+ }
+
+ SECTION("Do not cache execution result in action cache") {
+ action->SetCacheFlag(IExecutionAction::CacheFlag::DoNotCacheOutput);
+
+ // run execution
+ auto response = action->Execute();
+ REQUIRE(response);
+
+ // verify result
+ auto artifacts = response->Artifacts();
+ REQUIRE(artifacts.contains(output_path));
+ CHECK(artifacts.at(output_path).digest == test_digest);
+ CHECK(not response->IsCached());
+
+ SECTION("Rerun execution to verify caching") {
+ // run execution
+ auto response = action->Execute();
+ REQUIRE(response);
+
+ // verify result
+ auto artifacts = response->Artifacts();
+ REQUIRE(artifacts.contains(output_path));
+ CHECK(artifacts.at(output_path).digest == test_digest);
+ CHECK(not response->IsCached());
+ }
+ }
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "LocalAPI: Non-zero exit code, create output",
+ "[execution_api]") {
+ std::string test_content("test");
+ auto test_digest = ArtifactDigest::Create(test_content);
+
+ std::string output_path{"output_file"};
+
+ auto api = LocalApi();
+
+ auto action = api.CreateAction(*api.UploadTree({}),
+ {"/bin/sh",
+ "-c",
+ "set -e\necho -n " + test_content + " > " +
+ output_path + "\nexit 1\n"},
+ {output_path},
+ {},
+ {},
+ {});
+
+ SECTION("Cache execution result in action cache") {
+ action->SetCacheFlag(IExecutionAction::CacheFlag::CacheOutput);
+
+ // run execution
+ auto response = action->Execute();
+ REQUIRE(response);
+
+ // verify result
+ CHECK(response->ExitCode() == 1);
+ auto artifacts = response->Artifacts();
+ REQUIRE(artifacts.contains(output_path));
+ CHECK(artifacts.at(output_path).digest == test_digest);
+ CHECK(not response->IsCached());
+
+ SECTION("Rerun execution to verify that non-zero actions are rerun") {
+ // run execution
+ auto response = action->Execute();
+ REQUIRE(response);
+
+ // verify result
+ CHECK(response->ExitCode() == 1);
+ auto artifacts = response->Artifacts();
+ REQUIRE(artifacts.contains(output_path));
+ CHECK(artifacts.at(output_path).digest == test_digest);
+ CHECK(not response->IsCached());
+ }
+ }
+
+ SECTION("Do not cache execution result in action cache") {
+ action->SetCacheFlag(IExecutionAction::CacheFlag::DoNotCacheOutput);
+
+ // run execution
+ auto response = action->Execute();
+ REQUIRE(response);
+
+ // verify result
+ CHECK(response->ExitCode() == 1);
+ auto artifacts = response->Artifacts();
+ REQUIRE(artifacts.contains(output_path));
+ CHECK(artifacts.at(output_path).digest == test_digest);
+ CHECK(not response->IsCached());
+
+ SECTION("Rerun execution to verify non-zero actions are not cached") {
+ // run execution
+ auto response = action->Execute();
+ REQUIRE(response);
+
+ // verify result
+ CHECK(response->ExitCode() == 1);
+ auto artifacts = response->Artifacts();
+ REQUIRE(artifacts.contains(output_path));
+ CHECK(artifacts.at(output_path).digest == test_digest);
+ CHECK(not response->IsCached());
+ }
+ }
+}
diff --git a/test/buildtool/execution_api/local/local_cas.test.cpp b/test/buildtool/execution_api/local/local_cas.test.cpp
new file mode 100644
index 00000000..9f43ac6a
--- /dev/null
+++ b/test/buildtool/execution_api/local/local_cas.test.cpp
@@ -0,0 +1,88 @@
+#include <string>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/crypto/hash_generator.hpp"
+#include "src/buildtool/execution_api/local/local_cas.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "test/utils/hermeticity/local.hpp"
+
+TEST_CASE_METHOD(HermeticLocalTestFixture, "LocalCAS", "[execution_api]") {
+ std::string test_content{"test"};
+ auto test_digest = ArtifactDigest::Create(test_content);
+
+ SECTION("CAS for files") {
+ LocalCAS<ObjectType::File> cas{};
+ CHECK(not cas.BlobPath(test_digest));
+
+ SECTION("Add blob from bytes and verify") {
+ // add blob
+ auto cas_digest = cas.StoreBlobFromBytes(test_content);
+ CHECK(cas_digest);
+ CHECK(std::equal_to<bazel_re::Digest>{}(*cas_digest, test_digest));
+
+ // verify blob
+ auto blob_path = cas.BlobPath(*cas_digest);
+ REQUIRE(blob_path);
+ auto const cas_content = FileSystemManager::ReadFile(*blob_path);
+ CHECK(cas_content.has_value());
+ CHECK(cas_content == test_content);
+ CHECK(not FileSystemManager::IsExecutable(*blob_path));
+ }
+
+ SECTION("Add blob from file") {
+ CHECK(FileSystemManager::CreateDirectory("tmp"));
+ CHECK(FileSystemManager::WriteFile(test_content, "tmp/test"));
+
+ // add blob
+ auto cas_digest = cas.StoreBlobFromFile("tmp/test");
+ CHECK(cas_digest);
+ CHECK(std::equal_to<bazel_re::Digest>{}(*cas_digest, test_digest));
+
+ // verify blob
+ auto blob_path = cas.BlobPath(*cas_digest);
+ REQUIRE(blob_path);
+ auto const cas_content = FileSystemManager::ReadFile(*blob_path);
+ CHECK(cas_content.has_value());
+ CHECK(cas_content == test_content);
+ CHECK(not FileSystemManager::IsExecutable(*blob_path));
+ }
+ }
+
+ SECTION("CAS for executables") {
+ LocalCAS<ObjectType::Executable> cas{};
+ CHECK(not cas.BlobPath(test_digest));
+
+ SECTION("Add blob from bytes and verify") {
+ // add blob
+ auto cas_digest = cas.StoreBlobFromBytes(test_content);
+ CHECK(cas_digest);
+ CHECK(std::equal_to<bazel_re::Digest>{}(*cas_digest, test_digest));
+
+ // verify blob
+ auto blob_path = cas.BlobPath(*cas_digest);
+ REQUIRE(blob_path);
+ auto const cas_content = FileSystemManager::ReadFile(*blob_path);
+ CHECK(cas_content.has_value());
+ CHECK(cas_content == test_content);
+ CHECK(FileSystemManager::IsExecutable(*blob_path));
+ }
+
+ SECTION("Add blob from file") {
+ CHECK(FileSystemManager::CreateDirectory("tmp"));
+ CHECK(FileSystemManager::WriteFile(test_content, "tmp/test"));
+
+ // add blob
+ auto cas_digest = cas.StoreBlobFromFile("tmp/test");
+ CHECK(cas_digest);
+ CHECK(std::equal_to<bazel_re::Digest>{}(*cas_digest, test_digest));
+
+ // verify blob
+ auto blob_path = cas.BlobPath(*cas_digest);
+ REQUIRE(blob_path);
+ auto const cas_content = FileSystemManager::ReadFile(*blob_path);
+ CHECK(cas_content.has_value());
+ CHECK(cas_content == test_content);
+ CHECK(FileSystemManager::IsExecutable(*blob_path));
+ }
+ }
+}
diff --git a/test/buildtool/execution_api/local/local_execution.test.cpp b/test/buildtool/execution_api/local/local_execution.test.cpp
new file mode 100755
index 00000000..6c38c669
--- /dev/null
+++ b/test/buildtool/execution_api/local/local_execution.test.cpp
@@ -0,0 +1,274 @@
+#include <chrono>
+#include <string>
+#include <vector>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/common/artifact_factory.hpp"
+#include "src/buildtool/execution_api/local/local_api.hpp"
+#include "test/utils/hermeticity/local.hpp"
+
+namespace {
+
+[[nodiscard]] auto GetTestDir() -> std::filesystem::path {
+ auto* tmp_dir = std::getenv("TEST_TMPDIR");
+ if (tmp_dir != nullptr) {
+ return tmp_dir;
+ }
+ return FileSystemManager::GetCurrentDirectory() /
+ "test/buildtool/execution_api/local";
+}
+
+} // namespace
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "LocalExecution: No input, no output",
+ "[execution_api]") {
+ auto api = LocalApi{};
+
+ std::string test_content("test");
+ std::vector<std::string> const cmdline = {"echo", "-n", test_content};
+ auto action =
+ api.CreateAction(*api.UploadTree({}), cmdline, {}, {}, {}, {});
+ REQUIRE(action);
+
+ SECTION("Cache execution result in action cache") {
+ // run execution
+ action->SetCacheFlag(IExecutionAction::CacheFlag::CacheOutput);
+ auto output = action->Execute(nullptr);
+ REQUIRE(output);
+
+ // verify result
+ CHECK_FALSE(output->IsCached());
+ CHECK(output->StdOut() == test_content);
+
+ output = action->Execute(nullptr);
+ REQUIRE(output);
+ CHECK(output->IsCached());
+ }
+
+ SECTION("Do not cache execution result in action cache") {
+ // run execution
+ action->SetCacheFlag(IExecutionAction::CacheFlag::DoNotCacheOutput);
+ auto output = action->Execute(nullptr);
+ REQUIRE(output);
+
+ // verify result
+ CHECK_FALSE(output->IsCached());
+ CHECK(output->StdOut() == test_content);
+
+ // ensure result IS STILL NOT in cache
+ output = action->Execute(nullptr);
+ REQUIRE(output);
+ CHECK_FALSE(output->IsCached());
+ }
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "LocalExecution: No input, no output, env variables used",
+ "[execution_api]") {
+ auto api = LocalApi{};
+
+ std::string test_content("test from env var");
+ std::vector<std::string> const cmdline = {
+ "/bin/sh", "-c", "set -e\necho -n ${MYCONTENT}"};
+ auto action = api.CreateAction(*api.UploadTree({}),
+ cmdline,
+ {},
+ {},
+ {{"MYCONTENT", test_content}},
+ {});
+ REQUIRE(action);
+
+ SECTION("Cache execution result in action cache") {
+ // run execution
+ action->SetCacheFlag(IExecutionAction::CacheFlag::CacheOutput);
+ auto output = action->Execute(nullptr);
+ REQUIRE(output);
+
+ // verify result
+ CHECK_FALSE(output->IsCached());
+ CHECK(output->StdOut() == test_content);
+
+ // ensure result IS in cache
+ output = action->Execute(nullptr);
+ REQUIRE(output);
+ CHECK(output->IsCached());
+ }
+
+ SECTION("Do not cache execution result in action cache") {
+ // run execution
+ action->SetCacheFlag(IExecutionAction::CacheFlag::DoNotCacheOutput);
+ auto output = action->Execute(nullptr);
+ REQUIRE(output);
+
+ // verify result
+ CHECK_FALSE(output->IsCached());
+ CHECK(output->StdOut() == test_content);
+
+ // ensure result IS STILL NOT in cache
+ output = action->Execute(nullptr);
+ REQUIRE(output);
+ CHECK_FALSE(output->IsCached());
+ }
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "LocalExecution: No input, create output",
+ "[execution_api]") {
+ auto api = LocalApi{};
+
+ std::string test_content("test");
+ auto test_digest = ArtifactDigest::Create(test_content);
+
+ std::string output_path{"output_file"};
+ std::vector<std::string> const cmdline = {
+ "/bin/sh",
+ "-c",
+ "set -e\necho -n " + test_content + " > " + output_path};
+
+ auto action = api.CreateAction(
+ *api.UploadTree({}), cmdline, {output_path}, {}, {}, {});
+ REQUIRE(action);
+
+ SECTION("Cache execution result in action cache") {
+ // run execution
+ action->SetCacheFlag(IExecutionAction::CacheFlag::CacheOutput);
+ auto output = action->Execute(nullptr);
+ REQUIRE(output);
+
+ // verify result
+ CHECK_FALSE(output->IsCached());
+ auto artifacts = output->Artifacts();
+ REQUIRE(artifacts.contains(output_path));
+ CHECK(artifacts.at(output_path).digest == test_digest);
+
+ // ensure result IS in cache
+ output = action->Execute(nullptr);
+ REQUIRE(output);
+ CHECK(output->IsCached());
+ }
+
+ SECTION("Do not cache execution result in action cache") {
+ // run execution
+ action->SetCacheFlag(IExecutionAction::CacheFlag::DoNotCacheOutput);
+ auto output = action->Execute(nullptr);
+ REQUIRE(output);
+
+ // verify result
+ CHECK_FALSE(output->IsCached());
+ auto artifacts = output->Artifacts();
+ REQUIRE(artifacts.contains(output_path));
+ CHECK(artifacts.at(output_path).digest == test_digest);
+
+ // ensure result IS STILL NOT in cache
+ output = action->Execute(nullptr);
+ REQUIRE(output);
+ CHECK_FALSE(output->IsCached());
+ }
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "LocalExecution: One input copied to output",
+ "[execution_api]") {
+ auto api = LocalApi{};
+
+ std::string test_content("test");
+ auto test_digest = ArtifactDigest::Create(test_content);
+ REQUIRE(api.Upload(BlobContainer{{BazelBlob{test_digest, test_content}}},
+ false));
+
+ std::string input_path{"dir/subdir/input"};
+ std::string output_path{"output_file"};
+
+ std::vector<std::string> const cmdline = {"cp", input_path, output_path};
+
+ auto local_artifact_opt =
+ ArtifactFactory::FromDescription(ArtifactFactory::DescribeKnownArtifact(
+ test_digest.hash(), test_digest.size(), ObjectType::File));
+ REQUIRE(local_artifact_opt);
+ auto local_artifact =
+ DependencyGraph::ArtifactNode{std::move(*local_artifact_opt)};
+
+ auto action =
+ api.CreateAction(*api.UploadTree({{input_path, &local_artifact}}),
+ cmdline,
+ {output_path},
+ {},
+ {},
+ {});
+ REQUIRE(action);
+
+ SECTION("Cache execution result in action cache") {
+ // run execution
+ action->SetCacheFlag(IExecutionAction::CacheFlag::CacheOutput);
+ auto output = action->Execute(nullptr);
+ REQUIRE(output);
+
+ // verify result
+ CHECK_FALSE(output->IsCached());
+ auto artifacts = output->Artifacts();
+ REQUIRE(artifacts.contains(output_path));
+ CHECK(artifacts.at(output_path).digest == test_digest);
+
+ // ensure result IS in cache
+ output = action->Execute(nullptr);
+ REQUIRE(output);
+ CHECK(output->IsCached());
+ }
+
+ SECTION("Do not cache execution result in action cache") {
+ // run execution
+ action->SetCacheFlag(IExecutionAction::CacheFlag::DoNotCacheOutput);
+ auto output = action->Execute(nullptr);
+ REQUIRE(output);
+
+ // verify result
+ CHECK_FALSE(output->IsCached());
+ auto artifacts = output->Artifacts();
+ REQUIRE(artifacts.contains(output_path));
+ CHECK(artifacts.at(output_path).digest == test_digest);
+
+ // ensure result IS STILL NOT in cache
+ output = action->Execute(nullptr);
+ REQUIRE(output);
+ CHECK_FALSE(output->IsCached());
+ }
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "LocalExecution: Cache failed action's result",
+ "[execution_api]") {
+ auto api = LocalApi{};
+
+ auto flag = GetTestDir() / "flag";
+ std::vector<std::string> const cmdline = {
+ "sh", "-c", fmt::format("[ -f '{}' ]", flag.string())};
+
+ auto action =
+ api.CreateAction(*api.UploadTree({}), cmdline, {}, {}, {}, {});
+ REQUIRE(action);
+
+ action->SetCacheFlag(IExecutionAction::CacheFlag::CacheOutput);
+
+ // run failed action
+ auto failed = action->Execute(nullptr);
+ REQUIRE(failed);
+ CHECK_FALSE(failed->IsCached());
+ CHECK(failed->ExitCode() != 0);
+
+ REQUIRE(FileSystemManager::CreateFile(flag));
+
+ // run success action (should rerun and overwrite)
+ auto success = action->Execute(nullptr);
+ REQUIRE(success);
+ CHECK_FALSE(success->IsCached());
+ CHECK(success->ExitCode() == 0);
+
+ // rerun success action (should be served from cache)
+ auto cached = action->Execute(nullptr);
+ REQUIRE(cached);
+ CHECK(cached->IsCached());
+ CHECK(cached->ExitCode() == 0);
+
+ CHECK(FileSystemManager::RemoveFile(flag));
+}
diff --git a/test/buildtool/execution_api/local/local_storage.test.cpp b/test/buildtool/execution_api/local/local_storage.test.cpp
new file mode 100644
index 00000000..f332c9d9
--- /dev/null
+++ b/test/buildtool/execution_api/local/local_storage.test.cpp
@@ -0,0 +1,180 @@
+#include <string>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/execution_api/local/local_storage.hpp"
+#include "test/utils/hermeticity/local.hpp"
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "LocalStorage: Add blob to storage from bytes",
+ "[execution_api]") {
+ std::string test_bytes("test");
+
+ LocalStorage storage{};
+ auto test_digest = ArtifactDigest::Create(test_bytes);
+
+ // check blob not in storage
+ CHECK(not storage.BlobPath(test_digest, true));
+ CHECK(not storage.BlobPath(test_digest, false));
+
+ // ensure previous calls did not accidentially create the blob
+ CHECK(not storage.BlobPath(test_digest, true));
+ CHECK(not storage.BlobPath(test_digest, false));
+
+ SECTION("Add non-executable blob to storage") {
+ CHECK(storage.StoreBlob(test_bytes, false));
+
+ auto file_path = storage.BlobPath(test_digest, false);
+ REQUIRE(file_path);
+ CHECK(FileSystemManager::IsFile(*file_path));
+ CHECK(not FileSystemManager::IsExecutable(*file_path, true));
+
+ auto exe_path = storage.BlobPath(test_digest, true);
+ REQUIRE(exe_path);
+ CHECK(FileSystemManager::IsFile(*exe_path));
+ CHECK(FileSystemManager::IsExecutable(*exe_path, true));
+ CHECK(not FileSystemManager::IsExecutable(*file_path, true));
+ }
+
+ SECTION("Add executable blob to storage") {
+ CHECK(storage.StoreBlob(test_bytes, true));
+
+ auto file_path = storage.BlobPath(test_digest, false);
+ REQUIRE(file_path);
+ CHECK(FileSystemManager::IsFile(*file_path));
+ CHECK(not FileSystemManager::IsExecutable(*file_path, true));
+
+ auto exe_path = storage.BlobPath(test_digest, true);
+ REQUIRE(exe_path);
+ CHECK(FileSystemManager::IsFile(*exe_path));
+ CHECK(FileSystemManager::IsExecutable(*exe_path, true));
+ CHECK(not FileSystemManager::IsExecutable(*file_path, true));
+ }
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "LocalStorage: Add blob to storage from non-executable file",
+ "[execution_api]") {
+ std::filesystem::path non_exec_file{
+ "test/buildtool/execution_api/data/non_executable_file"};
+
+ LocalStorage storage{};
+ auto test_blob = CreateBlobFromFile(non_exec_file);
+ REQUIRE(test_blob);
+
+ // check blob not in storage
+ CHECK(not storage.BlobPath(test_blob->digest, true));
+ CHECK(not storage.BlobPath(test_blob->digest, false));
+
+ // ensure previous calls did not accidentially create the blob
+ CHECK(not storage.BlobPath(test_blob->digest, true));
+ CHECK(not storage.BlobPath(test_blob->digest, false));
+
+ SECTION("Add blob to storage without specifying x-bit") {
+ CHECK(storage.StoreBlob(non_exec_file));
+
+ auto file_path = storage.BlobPath(test_blob->digest, false);
+ REQUIRE(file_path);
+ CHECK(FileSystemManager::IsFile(*file_path));
+ CHECK(not FileSystemManager::IsExecutable(*file_path, true));
+
+ auto exe_path = storage.BlobPath(test_blob->digest, true);
+ REQUIRE(exe_path);
+ CHECK(FileSystemManager::IsFile(*exe_path));
+ CHECK(FileSystemManager::IsExecutable(*exe_path, true));
+ CHECK(not FileSystemManager::IsExecutable(*file_path, true));
+ }
+
+ SECTION("Add non-executable blob to storage") {
+ CHECK(storage.StoreBlob(non_exec_file, false));
+
+ auto file_path = storage.BlobPath(test_blob->digest, false);
+ REQUIRE(file_path);
+ CHECK(FileSystemManager::IsFile(*file_path));
+ CHECK(not FileSystemManager::IsExecutable(*file_path, true));
+
+ auto exe_path = storage.BlobPath(test_blob->digest, true);
+ REQUIRE(exe_path);
+ CHECK(FileSystemManager::IsFile(*exe_path));
+ CHECK(FileSystemManager::IsExecutable(*exe_path, true));
+ CHECK(not FileSystemManager::IsExecutable(*file_path, true));
+ }
+
+ SECTION("Add executable blob to storage") {
+ CHECK(storage.StoreBlob(non_exec_file, true));
+
+ auto file_path = storage.BlobPath(test_blob->digest, false);
+ REQUIRE(file_path);
+ CHECK(FileSystemManager::IsFile(*file_path));
+ CHECK(not FileSystemManager::IsExecutable(*file_path, true));
+
+ auto exe_path = storage.BlobPath(test_blob->digest, true);
+ REQUIRE(exe_path);
+ CHECK(FileSystemManager::IsFile(*exe_path));
+ CHECK(FileSystemManager::IsExecutable(*exe_path, true));
+ CHECK(not FileSystemManager::IsExecutable(*file_path, true));
+ }
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "LocalStorage: Add blob to storage from executable file",
+ "[execution_api]") {
+ std::filesystem::path exec_file{
+ "test/buildtool/execution_api/data/executable_file"};
+
+ LocalStorage storage{};
+ auto test_blob = CreateBlobFromFile(exec_file);
+ REQUIRE(test_blob);
+
+ // check blob not in storage
+ CHECK(not storage.BlobPath(test_blob->digest, true));
+ CHECK(not storage.BlobPath(test_blob->digest, false));
+
+ // ensure previous calls did not accidentially create the blob
+ CHECK(not storage.BlobPath(test_blob->digest, true));
+ CHECK(not storage.BlobPath(test_blob->digest, false));
+
+ SECTION("Add blob to storage without specifying x-bit") {
+ CHECK(storage.StoreBlob(exec_file));
+
+ auto file_path = storage.BlobPath(test_blob->digest, false);
+ REQUIRE(file_path);
+ CHECK(FileSystemManager::IsFile(*file_path));
+ CHECK(not FileSystemManager::IsExecutable(*file_path, true));
+
+ auto exe_path = storage.BlobPath(test_blob->digest, true);
+ REQUIRE(exe_path);
+ CHECK(FileSystemManager::IsFile(*exe_path));
+ CHECK(FileSystemManager::IsExecutable(*exe_path, true));
+ CHECK(not FileSystemManager::IsExecutable(*file_path, true));
+ }
+
+ SECTION("Add non-executable blob to storage") {
+ CHECK(storage.StoreBlob(exec_file, false));
+
+ auto file_path = storage.BlobPath(test_blob->digest, false);
+ REQUIRE(file_path);
+ CHECK(FileSystemManager::IsFile(*file_path));
+ CHECK(not FileSystemManager::IsExecutable(*file_path, true));
+
+ auto exe_path = storage.BlobPath(test_blob->digest, true);
+ REQUIRE(exe_path);
+ CHECK(FileSystemManager::IsFile(*exe_path));
+ CHECK(FileSystemManager::IsExecutable(*exe_path, true));
+ CHECK(not FileSystemManager::IsExecutable(*file_path, true));
+ }
+
+ SECTION("Add executable blob to storage") {
+ CHECK(storage.StoreBlob(exec_file, true));
+
+ auto file_path = storage.BlobPath(test_blob->digest, false);
+ REQUIRE(file_path);
+ CHECK(FileSystemManager::IsFile(*file_path));
+ CHECK(not FileSystemManager::IsExecutable(*file_path, true));
+
+ auto exe_path = storage.BlobPath(test_blob->digest, true);
+ REQUIRE(exe_path);
+ CHECK(FileSystemManager::IsFile(*exe_path));
+ CHECK(FileSystemManager::IsExecutable(*exe_path, true));
+ CHECK(not FileSystemManager::IsExecutable(*file_path, true));
+ }
+}
diff --git a/test/buildtool/execution_api/local_tree_map.test.cpp b/test/buildtool/execution_api/local_tree_map.test.cpp
new file mode 100644
index 00000000..d8becced
--- /dev/null
+++ b/test/buildtool/execution_api/local_tree_map.test.cpp
@@ -0,0 +1,110 @@
+#include <string>
+#include <thread>
+#include <vector>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/execution_api/common/local_tree_map.hpp"
+#include "src/utils/cpp/atomic.hpp"
+
+namespace {
+
+[[nodiscard]] auto ToDigest(std::string const& s) {
+ return static_cast<bazel_re::Digest>(ArtifactDigest{s, 0});
+}
+
+[[nodiscard]] auto ToInfo(std::string const& s) {
+ return Artifact::ObjectInfo{ArtifactDigest{s, 0}, ObjectType::File};
+}
+
+} // namespace
+
+TEST_CASE("LocalTree: empty tree", "[execution_api]") {
+ LocalTreeMap tree_map{};
+
+ auto tree = tree_map.CreateTree();
+ CHECK(tree.size() == 0);
+ CHECK(std::all_of(
+ tree.begin(), tree.end(), [](auto /*unused*/) { return false; }));
+}
+
+TEST_CASE("LocalTree: first wins", "[execution_api]") {
+ LocalTreeMap tree_map{};
+
+ auto tree = tree_map.CreateTree();
+ CHECK(tree.AddInfo("foo", ToInfo("bar")));
+ CHECK(tree.AddInfo("foo", ToInfo("baz")));
+ CHECK(tree.size() == 1);
+ for (auto const& [path, oid] : tree) {
+ CHECK(oid->digest.hash() == "bar");
+ }
+}
+
+TEST_CASE("LocalTreeMap: first wins", "[execution_api]") {
+ LocalTreeMap tree_map{};
+
+ auto tree_1 = tree_map.CreateTree();
+ CHECK(tree_1.AddInfo("foo", ToInfo("bar")));
+
+ auto tree_2 = tree_map.CreateTree();
+ CHECK(tree_2.AddInfo("foo", ToInfo("baz")));
+
+ auto tree_id = ToDigest("tree");
+ CHECK(tree_map.AddTree(tree_id, std::move(tree_1))); // NOLINT
+ CHECK(tree_map.AddTree(tree_id, std::move(tree_2))); // NOLINT
+
+ CHECK(tree_map.HasTree(tree_id));
+
+ auto const* tree = tree_map.GetTree(tree_id);
+ REQUIRE(tree != nullptr);
+ CHECK(tree->size() == 1);
+ for (auto const& [path, oid] : *tree) {
+ CHECK(oid->digest.hash() == "bar");
+ }
+}
+
+TEST_CASE("LocalTreeMap: thread-safety", "[execution_api]") {
+ constexpr auto kNumThreads = 100;
+ constexpr auto kQ = 10;
+
+ atomic<bool> starting_signal{false};
+ std::vector<std::thread> threads{};
+ threads.reserve(kNumThreads);
+
+ LocalTreeMap tree_map{};
+
+ for (int id{}; id < kNumThreads; ++id) {
+ threads.emplace_back(
+ [&tree_map, &starting_signal](int tid) {
+ auto entry_id = std::to_string(tid);
+ auto tree = tree_map.CreateTree();
+ REQUIRE(tree.AddInfo(entry_id, ToInfo(entry_id)));
+
+ auto tree_id = ToDigest(std::to_string(tid / kQ));
+ starting_signal.wait(false);
+
+ // kQ-many threads try to add tree with same id
+ REQUIRE(tree_map.AddTree(tree_id, std::move(tree))); // NOLINT
+ },
+ id);
+ }
+
+ starting_signal = true;
+ starting_signal.notify_all();
+ for (auto& thread : threads) {
+ thread.join();
+ }
+
+ for (int id{}; id <= (kNumThreads - 1) / kQ; ++id) {
+ auto tree_id = ToDigest(std::to_string(id));
+ CHECK(tree_map.HasTree(tree_id));
+
+ auto const* tree = tree_map.GetTree(tree_id);
+ REQUIRE(tree != nullptr);
+ CHECK(tree->size() == 1);
+ for (auto const& [path, oid] : *tree) {
+ auto entry_id = std::stoi(oid->digest.hash());
+ CHECK(entry_id >= id * kQ);
+ CHECK(entry_id < (id + 1) * kQ);
+ }
+ }
+}
diff --git a/test/buildtool/execution_engine/TARGETS b/test/buildtool/execution_engine/TARGETS
new file mode 100644
index 00000000..26472eda
--- /dev/null
+++ b/test/buildtool/execution_engine/TARGETS
@@ -0,0 +1,10 @@
+{ "TESTS":
+ { "type": "install"
+ , "tainted": ["test"]
+ , "dirs":
+ [ [["./", "dag", "TESTS"], "dag"]
+ , [["./", "executor", "TESTS"], "executor"]
+ , [["./", "traverser", "TESTS"], "traverser"]
+ ]
+ }
+} \ No newline at end of file
diff --git a/test/buildtool/execution_engine/dag/TARGETS b/test/buildtool/execution_engine/dag/TARGETS
new file mode 100644
index 00000000..5e63d72f
--- /dev/null
+++ b/test/buildtool/execution_engine/dag/TARGETS
@@ -0,0 +1,15 @@
+{ "dag":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["dag"]
+ , "srcs": ["dag.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["test/utils", "container_matchers"]
+ , ["src/buildtool/common", "artifact_factory"]
+ , ["src/buildtool/execution_engine/dag", "dag"]
+ ]
+ , "stage": ["test", "buildtool", "execution_engine", "dag"]
+ }
+, "TESTS": {"type": "install", "tainted": ["test"], "deps": ["dag"]}
+} \ No newline at end of file
diff --git a/test/buildtool/execution_engine/dag/dag.test.cpp b/test/buildtool/execution_engine/dag/dag.test.cpp
new file mode 100644
index 00000000..0da28a0a
--- /dev/null
+++ b/test/buildtool/execution_engine/dag/dag.test.cpp
@@ -0,0 +1,293 @@
+#include "catch2/catch.hpp"
+#include "src/buildtool/common/artifact_factory.hpp"
+#include "src/buildtool/execution_engine/dag/dag.hpp"
+#include "test/utils/container_matchers.hpp"
+
+/// \brief Checks that each artifact with identifier in output_ids has been
+/// added to the graph and that its builder action has id action_id, and that
+/// all outputs of actions are those the ids of which are listed in output_ids
+void CheckOutputNodesCorrectlyAdded(
+ DependencyGraph const& g,
+ ActionIdentifier const& action_id,
+ std::vector<std::string> const& output_paths) {
+ std::vector<ArtifactIdentifier> output_ids;
+ for (auto const& path : output_paths) {
+ auto const output_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeActionArtifact(action_id, path));
+ CHECK(g.ArtifactWithId(output_id));
+ auto const* action = g.ActionNodeOfArtifactWithId(output_id);
+ CHECK(action != nullptr);
+ CHECK(action->Content().Id() == action_id);
+ output_ids.push_back(output_id);
+ }
+ CHECK_THAT(
+ g.ActionNodeWithId(action_id)->OutputFileIds(),
+ HasSameUniqueElementsAs<std::vector<ArtifactIdentifier>>(output_ids));
+}
+
+/// \brief Checks that the artifacts with ids in inputs_ids are in the graph and
+/// coincide with the action's dependencies
+void CheckInputNodesCorrectlyAdded(
+ DependencyGraph const& g,
+ ActionIdentifier const& action_id,
+ std::vector<ArtifactIdentifier> const& input_ids) noexcept {
+ for (auto const& input_id : input_ids) {
+ CHECK(g.ArtifactWithId(input_id));
+ }
+ CHECK_THAT(
+ g.ActionNodeWithId(action_id)->DependencyIds(),
+ HasSameUniqueElementsAs<std::vector<ArtifactIdentifier>>(input_ids));
+}
+
+/// \brief Checks that the artifacts have been added as local artifact and their
+/// local path is correct
+void CheckLocalArtifactsCorrectlyAdded(
+ DependencyGraph const& g,
+ std::vector<ArtifactIdentifier> const& ids,
+ std::vector<std::string> const& paths) noexcept {
+ REQUIRE(ids.size() == paths.size());
+ for (std::size_t pos = 0; pos < ids.size(); ++pos) {
+ auto const* artifact_node = g.ArtifactNodeWithId(ids[pos]);
+ CHECK(artifact_node != nullptr);
+ CHECK(not artifact_node->HasBuilderAction());
+ CHECK(artifact_node->Content().FilePath() == paths[pos]);
+ }
+}
+
+TEST_CASE("Empty Dependency Graph", "[dag]") {
+ DependencyGraph g;
+ CHECK(g.IsValid());
+}
+
+TEST_CASE("AddAction({single action, single output, no inputs})", "[dag]") {
+ std::string const action_id = "action_id";
+ auto const action_description = ActionDescription{
+ {"out"}, {}, Action{action_id, {"touch", "out"}, {}}, {}};
+ DependencyGraph g;
+ CHECK(g.AddAction(action_description));
+ CheckOutputNodesCorrectlyAdded(g, action_id, {"out"});
+ CHECK(g.IsValid());
+}
+
+TEST_CASE("AddAction({single action, more outputs, no inputs})", "[dag]") {
+ std::string const action_id = "action_id";
+ std::vector<std::string> const output_files = {"out0", "out1", "out2"};
+ auto const action_description = ActionDescription{
+ output_files,
+ {},
+ Action{action_id, {"touch", "out0", "out1", "out2"}, {}},
+ {}};
+ DependencyGraph g;
+ CHECK(g.AddAction(action_description));
+ CheckOutputNodesCorrectlyAdded(g, action_id, output_files);
+ CHECK(g.IsValid());
+}
+
+TEST_CASE("AddAction({single action, single output, source file})", "[dag]") {
+ using path = std::filesystem::path;
+ std::string const action_id = "action_id";
+ auto const src_description = ArtifactDescription{path{"main.cpp"}, "repo"};
+ auto const src_id = src_description.Id();
+ DependencyGraph g;
+ SECTION("Input file in the same path than it is locally") {
+ auto const action_description =
+ ActionDescription{{"executable"},
+ {},
+ Action{action_id, {"gcc", "main.cpp"}, {}},
+ {{"main.cpp", src_description}}};
+ CHECK(g.AddAction(action_description));
+ }
+ SECTION("Input file in different path from the local one") {
+ auto const action_description =
+ ActionDescription{{"executable"},
+ {},
+ Action{action_id, {"gcc", "src/a.cpp"}, {}},
+ {{"src/a.cpp", src_description}}};
+ CHECK(g.Add({action_description}));
+ }
+
+ CheckOutputNodesCorrectlyAdded(g, action_id, {"executable"});
+ CheckInputNodesCorrectlyAdded(g, action_id, {src_id});
+
+ // Now we check that the src file artifact was added with the correct path
+ CheckLocalArtifactsCorrectlyAdded(g, {src_id}, {"main.cpp"});
+
+ // All artifacts are the source file and the executable
+ CHECK_THAT(g.ArtifactIdentifiers(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ {src_id,
+ ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeActionArtifact(
+ action_id, "executable"))}));
+ CHECK(g.IsValid());
+}
+
+TEST_CASE("AddAction({single action, single output, no inputs, env_variables})",
+ "[dag]") {
+ std::string const action_id = "action_id";
+ std::string const name = "World";
+ DependencyGraph g;
+ std::vector<std::string> const command{
+ "/bin/sh", "-c", "set -e\necho 'Hello, ${NAME}' > greeting"};
+ nlohmann::json const env_vars{{"NAME", name}};
+ auto const action_description = ActionDescription{
+ {"greeting"}, {}, Action{action_id, command, env_vars}, {}};
+
+ CHECK(g.AddAction(action_description));
+
+ CheckOutputNodesCorrectlyAdded(g, action_id, {"greeting"});
+ CheckInputNodesCorrectlyAdded(g, action_id, {});
+
+ auto const* const action_node = g.ActionNodeWithId(action_id);
+ CHECK(action_node != nullptr);
+ CHECK(action_node->Command() == command);
+ CHECK(action_node->Env() == env_vars);
+
+ // All artifacts are the output file
+ CHECK_THAT(g.ArtifactIdentifiers(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ {ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeActionArtifact(action_id,
+ "greeting"))}));
+ CHECK(g.IsValid());
+}
+
+TEST_CASE("Add executable and library", "[dag]") {
+ // Note: we don't use local bindings for members of pair as output of
+ // functions because it seems to be problematic with Catch2's macros inside
+ // lambdas and we want to use lambdas here to avoid repetition
+ using path = std::filesystem::path;
+ std::string const make_exec_id = "make_exe";
+ std::string const make_lib_id = "make_lib";
+ std::vector<std::string> const make_exec_cmd = {"build", "exec"};
+ std::vector<std::string> const make_lib_cmd = {"build", "lib.a"};
+ auto const main_desc = ArtifactDescription{path{"main.cpp"}, ""};
+ auto const main_id = main_desc.Id();
+ auto const lib_hpp_desc = ArtifactDescription{path{"lib/lib.hpp"}, ""};
+ auto const lib_hpp_id = lib_hpp_desc.Id();
+ auto const lib_cpp_desc = ArtifactDescription{path{"lib/lib.cpp"}, ""};
+ auto const lib_cpp_id = lib_cpp_desc.Id();
+ auto const lib_a_desc = ArtifactDescription{make_lib_id, "lib.a"};
+ auto const lib_a_id = lib_a_desc.Id();
+
+ auto const make_exec_desc =
+ ActionDescription{{"exec"},
+ {},
+ Action{make_exec_id, make_exec_cmd, {}},
+ {{"main.cpp", main_desc}, {"lib.a", lib_a_desc}}};
+ auto const exec_out_id = ArtifactDescription{make_exec_id, "exec"}.Id();
+
+ auto const make_lib_desc = ActionDescription{
+ {"lib.a"},
+ {},
+ Action{make_lib_id, make_lib_cmd, {}},
+ {{"lib.hpp", lib_hpp_desc}, {"lib.cpp", lib_cpp_desc}}};
+
+ DependencyGraph g;
+ auto check_exec = [&]() {
+ CHECK(g.IsValid());
+ CheckOutputNodesCorrectlyAdded(g, make_exec_id, {"exec"});
+ CheckInputNodesCorrectlyAdded(g, make_exec_id, {main_id, lib_a_id});
+ CheckLocalArtifactsCorrectlyAdded(g, {main_id}, {"main.cpp"});
+ CHECK_THAT(g.ActionNodeOfArtifactWithId(exec_out_id)->Command(),
+ Catch::Matchers::Equals(make_exec_cmd));
+ };
+
+ auto check_lib = [&]() {
+ CHECK(g.IsValid());
+ CheckOutputNodesCorrectlyAdded(g, make_lib_id, {"lib.a"});
+ CheckInputNodesCorrectlyAdded(g, make_lib_id, {lib_hpp_id, lib_cpp_id});
+ CheckLocalArtifactsCorrectlyAdded(
+ g, {lib_hpp_id, lib_cpp_id}, {"lib/lib.hpp", "lib/lib.cpp"});
+ CHECK_THAT(g.ActionNodeOfArtifactWithId(lib_a_id)->Command(),
+ Catch::Matchers::Equals(make_lib_cmd));
+ };
+
+ SECTION("First exec, then lib") {
+ CHECK(g.AddAction(make_exec_desc));
+ check_exec();
+ CHECK(g.AddAction(make_lib_desc));
+ check_lib();
+ }
+
+ SECTION("First lib, then exec") {
+ CHECK(g.AddAction(make_lib_desc));
+ check_lib();
+ CHECK(g.AddAction(make_exec_desc));
+ check_exec();
+ }
+
+ SECTION("Add both with single call to `DependencyGraph::Add`") {
+ CHECK(g.Add({make_exec_desc, make_lib_desc}));
+ check_exec();
+ check_lib();
+ }
+
+ CHECK_THAT(g.ArtifactIdentifiers(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ {main_id, exec_out_id, lib_a_id, lib_hpp_id, lib_cpp_id}));
+}
+
+// Incorrect action description tests
+
+TEST_CASE("AddAction(id, empty action description) fails", "[dag]") {
+ DependencyGraph g;
+ CHECK(not g.AddAction(ActionDescription{{}, {}, Action{"id", {}, {}}, {}}));
+}
+
+TEST_CASE("AddAction(Empty mandatory non-empty field in action description)",
+ "[dag]") {
+ DependencyGraph g;
+ CHECK(not g.AddAction(ActionDescription{
+ {"output0", "output1"}, {}, Action{"empty command", {}, {}}, {}}));
+ CHECK(not g.AddAction(ActionDescription{
+ {}, {}, Action{"empty output", {"echo", "hello"}, {}}, {}}));
+}
+
+// Collision between actions tests
+
+TEST_CASE("Adding cyclic dependencies produces invalid graph", "[dag]") {
+ std::string const action1_id = "action1";
+ std::string const action2_id = "action2";
+ auto const out1_desc = ArtifactDescription(action1_id, "out1");
+ auto const out1_id = out1_desc.Id();
+ auto const out2_desc = ArtifactDescription(action2_id, "out2");
+ auto const out2_id = out2_desc.Id();
+
+ auto const action1_desc =
+ ActionDescription{{"out1"},
+ {},
+ Action{action1_id, {"touch", "out1"}, {}},
+ {{"dep", out2_desc}}};
+ auto const action2_desc =
+ ActionDescription{{"out2"},
+ {},
+ Action{action2_id, {"touch", "out2"}, {}},
+ {{"dep", out1_desc}}};
+
+ DependencyGraph g;
+ CHECK(g.Add({action1_desc, action2_desc}));
+ CHECK(not g.IsValid());
+}
+
+TEST_CASE("Error when adding an action with an id already added", "[dag]") {
+ std::string const action_id = "id";
+ auto const action_desc =
+ ActionDescription{{"out"}, {}, Action{"id", {"touch", "out"}, {}}, {}};
+
+ DependencyGraph g;
+ CHECK(g.AddAction(action_desc));
+ CheckOutputNodesCorrectlyAdded(g, action_id, {"out"});
+ CHECK(g.IsValid());
+
+ CHECK(not g.AddAction(action_desc));
+}
+
+TEST_CASE("Error when adding conflicting output files and directories",
+ "[dag]") {
+ auto const action_desc = ActionDescription{
+ {"out"}, {"out"}, Action{"id", {"touch", "out"}, {}}, {}};
+
+ DependencyGraph g;
+ CHECK_FALSE(g.AddAction(action_desc));
+}
diff --git a/test/buildtool/execution_engine/executor/TARGETS b/test/buildtool/execution_engine/executor/TARGETS
new file mode 100644
index 00000000..9bf1c50e
--- /dev/null
+++ b/test/buildtool/execution_engine/executor/TARGETS
@@ -0,0 +1,71 @@
+{ "executor_api_tests":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["executor_api_tests"]
+ , "hdrs": ["executor_api.test.hpp"]
+ , "stage": ["test", "buildtool", "execution_engine", "executor"]
+ }
+, "executor":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["executor"]
+ , "srcs": ["executor.test.cpp"]
+ , "deps":
+ [ ["src/buildtool/common", "artifact_factory"]
+ , ["src/buildtool/execution_api/common", "common"]
+ , ["src/buildtool/execution_engine/dag", "dag"]
+ , ["src/buildtool/execution_engine/executor", "executor"]
+ , ["test", "catch-main"]
+ , ["@", "catch2", "", "catch2"]
+ ]
+ , "stage": ["test", "buildtool", "execution_engine", "executor"]
+ }
+, "local":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["local"]
+ , "srcs": ["executor_api_local.test.cpp"]
+ , "data": ["test_data"]
+ , "deps":
+ [ "executor_api_tests"
+ , ["src/buildtool/common", "artifact_factory"]
+ , ["src/buildtool/execution_api/local", "local"]
+ , ["src/buildtool/execution_api/remote", "config"]
+ , ["src/buildtool/execution_engine/dag", "dag"]
+ , ["src/buildtool/execution_engine/executor", "executor"]
+ , ["test/utils", "catch-main-remote-execution"]
+ , ["test/utils", "local_hermeticity"]
+ , ["@", "catch2", "", "catch2"]
+ ]
+ , "stage": ["test", "buildtool", "execution_engine", "executor"]
+ }
+, "remote_bazel":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["remote_bazel"]
+ , "srcs": ["executor_api_remote_bazel.test.cpp"]
+ , "data": ["test_data"]
+ , "deps":
+ [ "executor_api_tests"
+ , ["src/buildtool/common", "artifact_factory"]
+ , ["src/buildtool/execution_api/remote", "bazel"]
+ , ["src/buildtool/execution_api/remote", "config"]
+ , ["src/buildtool/execution_engine/executor", "executor"]
+ , ["test/utils", "catch-main-remote-execution"]
+ , ["@", "catch2", "", "catch2"]
+ ]
+ , "stage": ["test", "buildtool", "execution_engine", "executor"]
+ }
+, "test_data":
+ { "type": ["@", "rules", "data", "staged"]
+ , "srcs":
+ [ "data/greeter/greet.cpp"
+ , "data/greeter/greet.hpp"
+ , "data/greeter/greet_mod.cpp"
+ , "data/greeter/main.cpp"
+ , "data/hello_world/main.cpp"
+ ]
+ , "stage": ["test", "buildtool", "execution_engine", "executor"]
+ }
+, "TESTS":
+ { "type": "install"
+ , "tainted": ["test"]
+ , "deps": ["executor", "local", "remote_bazel"]
+ }
+} \ No newline at end of file
diff --git a/test/buildtool/execution_engine/executor/data/greeter/greet.cpp b/test/buildtool/execution_engine/executor/data/greeter/greet.cpp
new file mode 100644
index 00000000..f1a1cf6b
--- /dev/null
+++ b/test/buildtool/execution_engine/executor/data/greeter/greet.cpp
@@ -0,0 +1,6 @@
+#include <iostream>
+#include "greet.hpp"
+
+void greet(std::string const& name) {
+ std::cout << "Hello " << name << std::endl;
+}
diff --git a/test/buildtool/execution_engine/executor/data/greeter/greet.hpp b/test/buildtool/execution_engine/executor/data/greeter/greet.hpp
new file mode 100644
index 00000000..d4cb767d
--- /dev/null
+++ b/test/buildtool/execution_engine/executor/data/greeter/greet.hpp
@@ -0,0 +1,3 @@
+#include <string>
+
+void greet(std::string const& name);
diff --git a/test/buildtool/execution_engine/executor/data/greeter/greet_mod.cpp b/test/buildtool/execution_engine/executor/data/greeter/greet_mod.cpp
new file mode 100644
index 00000000..550a7bf8
--- /dev/null
+++ b/test/buildtool/execution_engine/executor/data/greeter/greet_mod.cpp
@@ -0,0 +1,8 @@
+#include <iostream>
+#include "greet.hpp"
+
+// this is a modification that has no effect on the produced binary
+
+void greet(std::string const& name) {
+ std::cout << "Hello " << name << std::endl;
+}
diff --git a/test/buildtool/execution_engine/executor/data/greeter/main.cpp b/test/buildtool/execution_engine/executor/data/greeter/main.cpp
new file mode 100644
index 00000000..4d51ee4a
--- /dev/null
+++ b/test/buildtool/execution_engine/executor/data/greeter/main.cpp
@@ -0,0 +1,6 @@
+#include "greet.hpp"
+
+int main(void) {
+ greet("devcloud");
+ return 0;
+}
diff --git a/test/buildtool/execution_engine/executor/data/hello_world/main.cpp b/test/buildtool/execution_engine/executor/data/hello_world/main.cpp
new file mode 100644
index 00000000..f7eb16a1
--- /dev/null
+++ b/test/buildtool/execution_engine/executor/data/hello_world/main.cpp
@@ -0,0 +1,6 @@
+#include <iostream>
+
+int main(void) {
+ std::cout << "Hello World!" << std::endl;
+ return 0;
+}
diff --git a/test/buildtool/execution_engine/executor/executor.test.cpp b/test/buildtool/execution_engine/executor/executor.test.cpp
new file mode 100755
index 00000000..63f41521
--- /dev/null
+++ b/test/buildtool/execution_engine/executor/executor.test.cpp
@@ -0,0 +1,358 @@
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/common/artifact_factory.hpp"
+#include "src/buildtool/execution_api/common/execution_api.hpp"
+#include "src/buildtool/execution_engine/executor/executor.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+
+/// \brief Mockup API test config.
+struct TestApiConfig {
+ struct TestArtifactConfig {
+ bool uploads{};
+ bool available{};
+ };
+
+ struct TestExecutionConfig {
+ bool failed{};
+ std::vector<std::string> outputs{};
+ };
+
+ struct TestResponseConfig {
+ bool cached{};
+ int exit_code{};
+ };
+
+ std::unordered_map<std::string, TestArtifactConfig> artifacts{};
+ TestExecutionConfig execution;
+ TestResponseConfig response;
+};
+
+// forward declarations
+class TestApi;
+class TestAction;
+class TestResponse;
+
+/// \brief Mockup Response, stores only config and action result
+class TestResponse : public IExecutionResponse {
+ friend class TestAction;
+
+ public:
+ [[nodiscard]] auto Status() const noexcept -> StatusCode final {
+ return StatusCode::Success;
+ }
+ [[nodiscard]] auto ExitCode() const noexcept -> int final {
+ return config_.response.exit_code;
+ }
+ [[nodiscard]] auto IsCached() const noexcept -> bool final {
+ return config_.response.cached;
+ }
+ [[nodiscard]] auto HasStdErr() const noexcept -> bool final { return true; }
+ [[nodiscard]] auto HasStdOut() const noexcept -> bool final { return true; }
+ [[nodiscard]] auto StdErr() noexcept -> std::string final { return {}; }
+ [[nodiscard]] auto StdOut() noexcept -> std::string final { return {}; }
+ [[nodiscard]] auto ActionDigest() const noexcept -> std::string final {
+ return {};
+ }
+ [[nodiscard]] auto Artifacts() const noexcept -> ArtifactInfos final {
+ ArtifactInfos artifacts{};
+ artifacts.reserve(config_.execution.outputs.size());
+
+ // collect files and store them
+ for (auto const& path : config_.execution.outputs) {
+ try {
+ artifacts.emplace(path,
+ Artifact::ObjectInfo{ArtifactDigest{path, 0},
+ ObjectType::File});
+ } catch (...) {
+ return {};
+ }
+ }
+
+ return artifacts;
+ }
+
+ private:
+ TestApiConfig config_{};
+ explicit TestResponse(TestApiConfig config) noexcept
+ : config_{std::move(config)} {}
+};
+
+/// \brief Mockup Action, stores only config
+class TestAction : public IExecutionAction {
+ friend class TestApi;
+
+ public:
+ auto Execute(Logger const* /*unused*/) noexcept
+ -> IExecutionResponse::Ptr final {
+ if (config_.execution.failed) {
+ return nullptr;
+ }
+ return IExecutionResponse::Ptr{new TestResponse{config_}};
+ }
+ void SetCacheFlag(CacheFlag /*unused*/) noexcept final {}
+ void SetTimeout(std::chrono::milliseconds /*unused*/) noexcept final {}
+
+ private:
+ TestApiConfig config_{};
+ explicit TestAction(TestApiConfig config) noexcept
+ : config_{std::move(config)} {}
+};
+
+/// \brief Mockup Api, use config to create action and handle artifact upload
+class TestApi : public IExecutionApi {
+ public:
+ explicit TestApi(TestApiConfig config) noexcept
+ : config_{std::move(config)} {}
+
+ auto CreateAction(
+ ArtifactDigest const& /*unused*/,
+ std::vector<std::string> const& /*unused*/,
+ std::vector<std::string> const& /*unused*/,
+ std::vector<std::string> const& /*unused*/,
+ std::map<std::string, std::string> const& /*unused*/,
+ std::map<std::string, std::string> const& /*unused*/) noexcept
+ -> IExecutionAction::Ptr final {
+ return IExecutionAction::Ptr{new TestAction(config_)};
+ }
+ auto RetrieveToPaths(
+ std::vector<Artifact::ObjectInfo> const& /*unused*/,
+ std::vector<std::filesystem::path> const& /*unused*/) noexcept
+ -> bool final {
+ return false; // not needed by Executor
+ }
+ auto RetrieveToFds(std::vector<Artifact::ObjectInfo> const& /*unused*/,
+ std::vector<int> const& /*unused*/) noexcept
+ -> bool final {
+ return false; // not needed by Executor
+ }
+ auto Upload(BlobContainer const& blobs, bool /*unused*/) noexcept
+ -> bool final {
+ for (auto const& blob : blobs) {
+ if (config_.artifacts[blob.data].uploads) {
+ continue; // for local artifacts
+ }
+ if (config_.artifacts[blob.digest.hash()].uploads) {
+ continue; // for known and action artifacts
+ }
+ return false;
+ }
+ return true;
+ }
+ auto UploadTree(
+ std::vector<
+ DependencyGraph::NamedArtifactNodePtr> const& /*unused*/) noexcept
+ -> std::optional<ArtifactDigest> final {
+ return ArtifactDigest{}; // not needed by Executor
+ }
+ [[nodiscard]] auto IsAvailable(ArtifactDigest const& digest) const noexcept
+ -> bool final {
+ try {
+ return config_.artifacts.at(digest.hash()).available;
+ } catch (std::exception const& /* unused */) {
+ return false;
+ }
+ }
+
+ private:
+ TestApiConfig config_{};
+};
+
+static void SetupConfig(std::filesystem::path const& ws) {
+ auto info = RepositoryConfig::RepositoryInfo{FileRoot{ws}};
+ RepositoryConfig::Instance().Reset();
+ RepositoryConfig::Instance().SetInfo("", std::move(info));
+}
+
+[[nodiscard]] static auto CreateTest(gsl::not_null<DependencyGraph*> const& g,
+ std::filesystem::path const& ws)
+ -> TestApiConfig {
+ using path = std::filesystem::path;
+ SetupConfig(ws);
+
+ auto const local_cpp_desc = ArtifactDescription{path{"local.cpp"}, ""};
+ auto const local_cpp_id = local_cpp_desc.Id();
+
+ auto const known_cpp_desc =
+ ArtifactDescription{ArtifactDigest{"known.cpp", 0}, ObjectType::File};
+ auto const known_cpp_id = known_cpp_desc.Id();
+
+ auto const test_action_desc = ActionDescription{
+ {"output1.exe", "output2.exe"},
+ {},
+ Action{"test_action", {"cmd", "line"}, {}},
+ {{"local.cpp", local_cpp_desc}, {"known.cpp", known_cpp_desc}}};
+
+ CHECK(g->AddAction(test_action_desc));
+ CHECK(FileSystemManager::WriteFile("local.cpp", ws / "local.cpp"));
+
+ TestApiConfig config{};
+
+ config.artifacts["local.cpp"].uploads = true;
+ config.artifacts["known.cpp"].available = true;
+ config.artifacts["output1.exe"].available = true;
+ config.artifacts["output2.exe"].available = true;
+
+ config.execution.failed = false;
+ config.execution.outputs = {"output1.exe", "output2.exe"};
+
+ config.response.cached = true;
+ config.response.exit_code = 0;
+
+ return config;
+}
+
+TEST_CASE("Executor: Process artifact", "[executor]") {
+ std::filesystem::path workspace_path{
+ "test/buildtool/execution_engine/executor"};
+ DependencyGraph g;
+ auto config = CreateTest(&g, workspace_path);
+
+ auto const local_cpp_desc =
+ ArtifactFactory::DescribeLocalArtifact("local.cpp", "");
+ auto const local_cpp_id = ArtifactFactory::Identifier(local_cpp_desc);
+
+ auto const known_cpp_desc = ArtifactFactory::DescribeKnownArtifact(
+ "known.cpp", 0, ObjectType::File);
+ auto const known_cpp_id = ArtifactFactory::Identifier(known_cpp_desc);
+
+ SECTION("Processing succeeds for valid config") {
+ auto api = TestApi::Ptr{new TestApi{config}};
+ Executor runner{api.get(), {}};
+
+ CHECK(runner.Process(g.ArtifactNodeWithId(local_cpp_id)));
+ CHECK(runner.Process(g.ArtifactNodeWithId(known_cpp_id)));
+ }
+
+ SECTION("Processing fails if uploading local artifact failed") {
+ config.artifacts["local.cpp"].uploads = false;
+
+ auto api = TestApi::Ptr{new TestApi{config}};
+ Executor runner{api.get(), {}};
+
+ CHECK(not runner.Process(g.ArtifactNodeWithId(local_cpp_id)));
+ CHECK(runner.Process(g.ArtifactNodeWithId(known_cpp_id)));
+ }
+
+ SECTION("Processing fails if known artifact is not available") {
+ config.artifacts["known.cpp"].available = false;
+
+ auto api = TestApi::Ptr{new TestApi{config}};
+ Executor runner{api.get(), {}};
+
+ CHECK(runner.Process(g.ArtifactNodeWithId(local_cpp_id)));
+ CHECK(not runner.Process(g.ArtifactNodeWithId(known_cpp_id)));
+ }
+}
+
+TEST_CASE("Executor: Process action", "[executor]") {
+ std::filesystem::path workspace_path{
+ "test/buildtool/execution_engine/executor"};
+
+ DependencyGraph g;
+ auto config = CreateTest(&g, workspace_path);
+
+ auto const local_cpp_desc =
+ ArtifactFactory::DescribeLocalArtifact("local.cpp", "");
+ auto const local_cpp_id = ArtifactFactory::Identifier(local_cpp_desc);
+
+ auto const known_cpp_desc = ArtifactFactory::DescribeKnownArtifact(
+ "known.cpp", 0, ObjectType::File);
+ auto const known_cpp_id = ArtifactFactory::Identifier(known_cpp_desc);
+
+ ActionIdentifier action_id{"test_action"};
+ auto const output1_desc =
+ ArtifactFactory::DescribeActionArtifact(action_id, "output1.exe");
+ auto const output1_id = ArtifactFactory::Identifier(output1_desc);
+
+ auto const output2_desc =
+ ArtifactFactory::DescribeActionArtifact(action_id, "output2.exe");
+ auto const output2_id = ArtifactFactory::Identifier(output2_desc);
+
+ SECTION("Processing succeeds for valid config") {
+ auto api = TestApi::Ptr{new TestApi{config}};
+ Executor runner{api.get(), {}};
+
+ CHECK(runner.Process(g.ArtifactNodeWithId(local_cpp_id)));
+ CHECK(runner.Process(g.ArtifactNodeWithId(known_cpp_id)));
+ CHECK(runner.Process(g.ActionNodeWithId(action_id)));
+ CHECK(runner.Process(g.ArtifactNodeWithId(output1_id)));
+ CHECK(runner.Process(g.ArtifactNodeWithId(output2_id)));
+ }
+
+ SECTION("Processing succeeds even if result was is not cached") {
+ config.response.cached = false;
+
+ auto api = TestApi::Ptr{new TestApi{config}};
+ Executor runner{api.get(), {}};
+
+ CHECK(runner.Process(g.ArtifactNodeWithId(local_cpp_id)));
+ CHECK(runner.Process(g.ArtifactNodeWithId(known_cpp_id)));
+ CHECK(runner.Process(g.ActionNodeWithId(action_id)));
+ CHECK(runner.Process(g.ArtifactNodeWithId(output1_id)));
+ CHECK(runner.Process(g.ArtifactNodeWithId(output2_id)));
+ }
+
+ SECTION("Processing succeeds even if output is not available in CAS") {
+ config.artifacts["output2.exe"].available = false;
+
+ auto api = TestApi::Ptr{new TestApi{config}};
+ Executor runner{api.get(), {}};
+
+ CHECK(runner.Process(g.ArtifactNodeWithId(local_cpp_id)));
+ CHECK(runner.Process(g.ArtifactNodeWithId(known_cpp_id)));
+ CHECK(runner.Process(g.ActionNodeWithId(action_id)));
+
+ // Note: Both output digests should be created via SaveDigests(),
+ // but processing output2.exe fails as it is not available in CAS.
+ CHECK(runner.Process(g.ArtifactNodeWithId(output1_id)));
+ CHECK(not runner.Process(g.ArtifactNodeWithId(output2_id)));
+ }
+
+ SECTION("Processing fails if execution failed") {
+ config.execution.failed = true;
+
+ auto api = TestApi::Ptr{new TestApi{config}};
+ Executor runner{api.get(), {}};
+
+ CHECK(runner.Process(g.ArtifactNodeWithId(local_cpp_id)));
+ CHECK(runner.Process(g.ArtifactNodeWithId(known_cpp_id)));
+ CHECK(not runner.Process(g.ActionNodeWithId(action_id)));
+ CHECK(not runner.Process(g.ArtifactNodeWithId(output1_id)));
+ CHECK(not runner.Process(g.ArtifactNodeWithId(output2_id)));
+ }
+
+ SECTION("Processing fails if exit code is non-zero") {
+ config.response.exit_code = 1;
+
+ auto api = TestApi::Ptr{new TestApi{config}};
+ Executor runner{api.get(), {}};
+
+ CHECK(runner.Process(g.ArtifactNodeWithId(local_cpp_id)));
+ CHECK(runner.Process(g.ArtifactNodeWithId(known_cpp_id)));
+ CHECK(not runner.Process(g.ActionNodeWithId(action_id)));
+
+ // Note: Both output digests should be missing as SaveDigests() for
+ // both is only called if processing action succeeds.
+ CHECK(not runner.Process(g.ArtifactNodeWithId(output1_id)));
+ CHECK(not runner.Process(g.ArtifactNodeWithId(output2_id)));
+ }
+
+ SECTION("Processing fails if any output is missing") {
+ config.execution.outputs = {"output1.exe" /*, "output2.exe"*/};
+
+ auto api = TestApi::Ptr{new TestApi{config}};
+ Executor runner{api.get(), {}};
+
+ CHECK(runner.Process(g.ArtifactNodeWithId(local_cpp_id)));
+ CHECK(runner.Process(g.ArtifactNodeWithId(known_cpp_id)));
+ CHECK(not runner.Process(g.ActionNodeWithId(action_id)));
+
+ // Note: Both output digests should be missing as SaveDigests() for
+ // both is only called if processing action succeeds.
+ CHECK(not runner.Process(g.ArtifactNodeWithId(output1_id)));
+ CHECK(not runner.Process(g.ArtifactNodeWithId(output2_id)));
+ }
+}
diff --git a/test/buildtool/execution_engine/executor/executor_api.test.hpp b/test/buildtool/execution_engine/executor/executor_api.test.hpp
new file mode 100755
index 00000000..48d37d98
--- /dev/null
+++ b/test/buildtool/execution_engine/executor/executor_api.test.hpp
@@ -0,0 +1,615 @@
+#ifndef INCLUDED_SRC_TEST_BUILDTOOL_EXECUTION_ENGINE_EXECUTOR_EXECUTOR_API_TEST_HPP
+#define INCLUDED_SRC_TEST_BUILDTOOL_EXECUTION_ENGINE_EXECUTOR_EXECUTOR_API_TEST_HPP
+
+#include <functional>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/common/artifact.hpp"
+#include "src/buildtool/common/artifact_description.hpp"
+#include "src/buildtool/common/artifact_factory.hpp"
+#include "src/buildtool/execution_api/common/execution_api.hpp"
+#include "src/buildtool/execution_engine/dag/dag.hpp"
+#include "src/buildtool/execution_engine/executor/executor.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "test/utils/test_env.hpp"
+
+using ApiFactory = std::function<IExecutionApi::Ptr()>;
+
+static inline void SetupConfig() {
+ auto info = RepositoryConfig::RepositoryInfo{
+ FileRoot{"test/buildtool/execution_engine/executor"}};
+ RepositoryConfig::Instance().SetInfo("", std::move(info));
+}
+
+static inline void RunBlobUpload(ApiFactory const& factory) {
+ SetupConfig();
+ auto api = factory();
+ std::string const blob = "test";
+ CHECK(api->Upload(BlobContainer{
+ {BazelBlob{ArtifactDigest{ComputeHash(blob), blob.size()}, blob}}}));
+}
+
+[[nodiscard]] static inline auto GetTestDir() -> std::filesystem::path {
+ auto* tmp_dir = std::getenv("TEST_TMPDIR");
+ if (tmp_dir != nullptr) {
+ return tmp_dir;
+ }
+ return FileSystemManager::GetCurrentDirectory() /
+ "test/buildtool/execution_engine/executor";
+}
+
+template <class Executor>
+[[nodiscard]] static inline auto AddAndProcessTree(DependencyGraph* g,
+ Executor* runner,
+ Tree const& tree_desc)
+ -> std::optional<Artifact::ObjectInfo> {
+ REQUIRE(g->AddAction(tree_desc.Action()));
+
+ // obtain tree action and tree artifact
+ auto const* tree_action = g->ActionNodeWithId(tree_desc.Id());
+ REQUIRE_FALSE(tree_action == nullptr);
+ auto const* tree_artifact = g->ArtifactNodeWithId(tree_desc.Output().Id());
+ REQUIRE_FALSE(tree_artifact == nullptr);
+
+ // "run" tree action to produce tree artifact
+ REQUIRE(runner->Process(tree_action));
+
+ // read computed tree artifact info (digest + object type)
+ return tree_artifact->Content().Info();
+}
+
+static inline void RunHelloWorldCompilation(ApiFactory const& factory,
+ bool is_hermetic = true,
+ int expected_queued = 0,
+ int expected_cached = 0) {
+ using path = std::filesystem::path;
+ SetupConfig();
+ auto const main_cpp_desc =
+ ArtifactDescription{path{"data/hello_world/main.cpp"}, ""};
+ auto const main_cpp_id = main_cpp_desc.Id();
+ std::string const make_hello_id = "make_hello";
+ auto const make_hello_desc = ActionDescription{
+ {"out/hello_world"},
+ {},
+ Action{make_hello_id,
+ {"c++", "src/main.cpp", "-o", "out/hello_world"},
+ {{"PATH", "/bin:/usr/bin"}}},
+ {{"src/main.cpp", main_cpp_desc}}};
+ auto const exec_desc =
+ ArtifactDescription{make_hello_id, "out/hello_world"};
+ auto const exec_id = exec_desc.Id();
+
+ DependencyGraph g;
+ CHECK(g.AddAction(make_hello_desc));
+ CHECK(g.ArtifactNodeWithId(exec_id)->HasBuilderAction());
+
+ auto api = factory();
+ Executor runner{api.get(), ReadPlatformPropertiesFromEnv()};
+
+ // upload local artifacts
+ auto const* main_cpp_node = g.ArtifactNodeWithId(main_cpp_id);
+ CHECK(main_cpp_node != nullptr);
+ CHECK(runner.Process(main_cpp_node));
+
+ // process action
+ CHECK(runner.Process(g.ArtifactNodeWithId(exec_id)->BuilderActionNode()));
+ if (is_hermetic) {
+ CHECK(Statistics::Instance().ActionsQueuedCounter() == expected_queued);
+ CHECK(Statistics::Instance().ActionsCachedCounter() == expected_cached);
+ }
+
+ auto tmpdir = GetTestDir();
+
+ // retrieve ALL artifacts
+ REQUIRE(FileSystemManager::CreateDirectory(tmpdir));
+ for (auto const& artifact_id : g.ArtifactIdentifiers()) {
+ CHECK(api->RetrieveToPaths(
+ {*g.ArtifactNodeWithId(artifact_id)->Content().Info()},
+ {(tmpdir / "output").string()}));
+ CHECK(FileSystemManager::IsFile(tmpdir / "output"));
+ REQUIRE(FileSystemManager::RemoveFile(tmpdir / "output"));
+ }
+}
+
+static inline void RunGreeterCompilation(ApiFactory const& factory,
+ std::string const& greetcpp,
+ bool is_hermetic = true,
+ int expected_queued = 0,
+ int expected_cached = 0) {
+ using path = std::filesystem::path;
+ SetupConfig();
+ auto const greet_hpp_desc =
+ ArtifactDescription{path{"data/greeter/greet.hpp"}, ""};
+ auto const greet_hpp_id = greet_hpp_desc.Id();
+ auto const greet_cpp_desc =
+ ArtifactDescription{path{"data/greeter"} / greetcpp, ""};
+ auto const greet_cpp_id = greet_cpp_desc.Id();
+
+ std::string const compile_greet_id = "compile_greet";
+ auto const compile_greet_desc =
+ ActionDescription{{"out/greet.o"},
+ {},
+ Action{compile_greet_id,
+ {"c++",
+ "-c",
+ "src/greet.cpp",
+ "-I",
+ "include",
+ "-o",
+ "out/greet.o"},
+ {{"PATH", "/bin:/usr/bin"}}},
+ {{"include/greet.hpp", greet_hpp_desc},
+ {"src/greet.cpp", greet_cpp_desc}}};
+
+ auto const greet_o_desc =
+ ArtifactDescription{compile_greet_id, "out/greet.o"};
+ auto const greet_o_id = greet_o_desc.Id();
+
+ std::string const make_lib_id = "make_lib";
+ auto const make_lib_desc = ActionDescription{
+ {"out/libgreet.a"},
+ {},
+ Action{make_lib_id, {"ar", "rcs", "out/libgreet.a", "greet.o"}, {}},
+ {{"greet.o", greet_o_desc}}};
+
+ auto const main_cpp_desc =
+ ArtifactDescription{path{"data/greeter/main.cpp"}, ""};
+ auto const main_cpp_id = main_cpp_desc.Id();
+
+ auto const libgreet_desc =
+ ArtifactDescription{make_lib_id, "out/libgreet.a"};
+ auto const libgreet_id = libgreet_desc.Id();
+
+ std::string const make_exe_id = "make_exe";
+ auto const make_exe_desc =
+ ActionDescription{{"out/greeter"},
+ {},
+ Action{make_exe_id,
+ {"c++",
+ "src/main.cpp",
+ "-I",
+ "include",
+ "-L",
+ "lib",
+ "-lgreet",
+ "-o",
+ "out/greeter"},
+ {{"PATH", "/bin:/usr/bin"}}},
+ {{"src/main.cpp", main_cpp_desc},
+ {"include/greet.hpp", greet_hpp_desc},
+ {"lib/libgreet.a", libgreet_desc}}};
+
+ auto const exec_id = ArtifactDescription(make_exe_id, "out/greeter").Id();
+
+ DependencyGraph g;
+ CHECK(g.Add({compile_greet_desc, make_lib_desc, make_exe_desc}));
+
+ auto api = factory();
+ Executor runner{api.get(), ReadPlatformPropertiesFromEnv()};
+
+ // upload local artifacts
+ for (auto const& id : {greet_hpp_id, greet_cpp_id, main_cpp_id}) {
+ auto const* node = g.ArtifactNodeWithId(id);
+ CHECK(node != nullptr);
+ CHECK(runner.Process(node));
+ }
+
+ // process actions
+ CHECK(
+ runner.Process(g.ArtifactNodeWithId(greet_o_id)->BuilderActionNode()));
+ CHECK(
+ runner.Process(g.ArtifactNodeWithId(libgreet_id)->BuilderActionNode()));
+ CHECK(runner.Process(g.ArtifactNodeWithId(exec_id)->BuilderActionNode()));
+ if (is_hermetic) {
+ CHECK(Statistics::Instance().ActionsQueuedCounter() == expected_queued);
+ CHECK(Statistics::Instance().ActionsCachedCounter() == expected_cached);
+ }
+
+ auto tmpdir = GetTestDir();
+
+ // retrieve ALL artifacts
+ REQUIRE(FileSystemManager::CreateDirectory(tmpdir));
+ for (auto const& artifact_id : g.ArtifactIdentifiers()) {
+ CHECK(api->RetrieveToPaths(
+ {*g.ArtifactNodeWithId(artifact_id)->Content().Info()},
+ {(tmpdir / "output").string()}));
+ CHECK(FileSystemManager::IsFile(tmpdir / "output"));
+ REQUIRE(FileSystemManager::RemoveFile(tmpdir / "output"));
+ }
+}
+
+[[maybe_unused]] static void TestBlobUpload(ApiFactory const& factory) {
+ SetupConfig();
+ // NOLINTNEXTLINE
+ RunBlobUpload(factory);
+}
+
+[[maybe_unused]] static void TestHelloWorldCompilation(
+ ApiFactory const& factory,
+ bool is_hermetic = true) {
+ SetupConfig();
+ // expecting 1 action queued, 0 results from cache
+ // NOLINTNEXTLINE
+ RunHelloWorldCompilation(factory, is_hermetic, 1, 0);
+
+ SECTION("Running same compilation again") {
+ // expecting 2 actions queued, 1 result from cache
+ // NOLINTNEXTLINE
+ RunHelloWorldCompilation(factory, is_hermetic, 2, 1);
+ }
+}
+
+[[maybe_unused]] static void TestGreeterCompilation(ApiFactory const& factory,
+ bool is_hermetic = true) {
+ SetupConfig();
+ // expecting 3 action queued, 0 results from cache
+ // NOLINTNEXTLINE
+ RunGreeterCompilation(factory, "greet.cpp", is_hermetic, 3, 0);
+
+ SECTION("Running same compilation again") {
+ // expecting 6 actions queued, 3 results from cache
+ // NOLINTNEXTLINE
+ RunGreeterCompilation(factory, "greet.cpp", is_hermetic, 6, 3);
+ }
+
+ SECTION("Running modified compilation") {
+ // expecting 6 actions queued, 2 results from cache
+ // NOLINTNEXTLINE
+ RunGreeterCompilation(factory, "greet_mod.cpp", is_hermetic, 6, 2);
+ }
+}
+
+static inline void TestUploadAndDownloadTrees(ApiFactory const& factory,
+ bool /*is_hermetic*/ = true,
+ int /*expected_queued*/ = 0,
+ int /*expected_cached*/ = 0) {
+ SetupConfig();
+ auto tmpdir = GetTestDir();
+
+ auto foo = std::string{"foo"};
+ auto bar = std::string{"bar"};
+ auto foo_digest = ArtifactDigest{ComputeHash(foo), foo.size()};
+ auto bar_digest = ArtifactDigest{ComputeHash(bar), bar.size()};
+
+ // upload blobs
+ auto api = factory();
+ REQUIRE(api->Upload(BlobContainer{
+ {BazelBlob{foo_digest, foo}, BazelBlob{bar_digest, bar}}}));
+
+ // define known artifacts
+ auto foo_desc = ArtifactDescription{foo_digest, ObjectType::File};
+ auto bar_desc = ArtifactDescription{bar_digest, ObjectType::File};
+
+ DependencyGraph g{};
+ auto foo_id = g.AddArtifact(foo_desc);
+ auto bar_id = g.AddArtifact(bar_desc);
+
+ Executor runner{api.get(), ReadPlatformPropertiesFromEnv()};
+ REQUIRE(runner.Process(g.ArtifactNodeWithId(foo_id)));
+ REQUIRE(runner.Process(g.ArtifactNodeWithId(bar_id)));
+
+ SECTION("Simple tree") {
+ auto tree_desc = Tree{{{"a", foo_desc}, {"b", bar_desc}}};
+ auto tree_info = AddAndProcessTree(&g, &runner, tree_desc);
+ REQUIRE(tree_info);
+ CHECK(IsTreeObject(tree_info->type));
+
+ tmpdir /= "simple";
+ CHECK(api->RetrieveToPaths({*tree_info}, {tmpdir.string()}));
+ CHECK(FileSystemManager::IsDirectory(tmpdir));
+ CHECK(FileSystemManager::IsFile(tmpdir / "a"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "b"));
+ CHECK(*FileSystemManager::ReadFile(tmpdir / "a") == "foo");
+ CHECK(*FileSystemManager::ReadFile(tmpdir / "b") == "bar");
+ REQUIRE(FileSystemManager::RemoveDirectory(tmpdir, true));
+ }
+
+ SECTION("Subdir in tree path") {
+ auto tree_desc = Tree{{{"a", foo_desc}, {"b/a", bar_desc}}};
+ auto tree_info = AddAndProcessTree(&g, &runner, tree_desc);
+ REQUIRE(tree_info);
+ CHECK(IsTreeObject(tree_info->type));
+
+ tmpdir /= "subdir";
+ CHECK(api->RetrieveToPaths({*tree_info}, {tmpdir.string()}));
+ CHECK(FileSystemManager::IsDirectory(tmpdir));
+ CHECK(FileSystemManager::IsDirectory(tmpdir / "b"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "a"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "b" / "a"));
+ CHECK(*FileSystemManager::ReadFile(tmpdir / "a") == "foo");
+ CHECK(*FileSystemManager::ReadFile(tmpdir / "b" / "a") == "bar");
+ REQUIRE(FileSystemManager::RemoveDirectory(tmpdir, true));
+ }
+
+ SECTION("Nested trees") {
+ auto tree_desc_nested = Tree{{{"a", bar_desc}}};
+ auto tree_desc_parent =
+ Tree{{{"a", foo_desc}, {"b", tree_desc_nested.Output()}}};
+
+ REQUIRE(AddAndProcessTree(&g, &runner, tree_desc_nested));
+ auto tree_info = AddAndProcessTree(&g, &runner, tree_desc_parent);
+ REQUIRE(tree_info);
+ CHECK(IsTreeObject(tree_info->type));
+
+ tmpdir /= "nested";
+ CHECK(api->RetrieveToPaths({*tree_info}, {tmpdir.string()}));
+ CHECK(FileSystemManager::IsDirectory(tmpdir));
+ CHECK(FileSystemManager::IsDirectory(tmpdir / "b"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "a"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "b" / "a"));
+ CHECK(*FileSystemManager::ReadFile(tmpdir / "a") == "foo");
+ CHECK(*FileSystemManager::ReadFile(tmpdir / "b" / "a") == "bar");
+ REQUIRE(FileSystemManager::RemoveDirectory(tmpdir, true));
+ }
+
+ SECTION("Dot-path tree as action input") {
+ auto tree_desc = Tree{{{"a", foo_desc}, {"b/a", bar_desc}}};
+ auto action_inputs =
+ ActionDescription::inputs_t{{".", tree_desc.Output()}};
+ ActionDescription action_desc{
+ {"a", "b/a"}, {}, Action{"action_id", {"echo"}, {}}, action_inputs};
+
+ REQUIRE(AddAndProcessTree(&g, &runner, tree_desc));
+ REQUIRE(g.Add({action_desc}));
+ auto const* action_node = g.ActionNodeWithId("action_id");
+ REQUIRE(runner.Process(action_node));
+
+ tmpdir /= "dotpath";
+ std::vector<Artifact::ObjectInfo> infos{};
+ std::vector<std::filesystem::path> paths{};
+ for (auto const& [path, node] : action_node->OutputFiles()) {
+ paths.emplace_back(tmpdir / path);
+ infos.emplace_back(*node->Content().Info());
+ }
+
+ CHECK(api->RetrieveToPaths(infos, paths));
+ CHECK(FileSystemManager::IsDirectory(tmpdir));
+ CHECK(FileSystemManager::IsDirectory(tmpdir / "b"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "a"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "b" / "a"));
+ CHECK(*FileSystemManager::ReadFile(tmpdir / "a") == "foo");
+ CHECK(*FileSystemManager::ReadFile(tmpdir / "b" / "a") == "bar");
+ REQUIRE(FileSystemManager::RemoveDirectory(tmpdir, true));
+ }
+
+ SECTION("Dot-path non-tree as action input") {
+ auto action_inputs = ActionDescription::inputs_t{{".", foo_desc}};
+ ActionDescription action_desc{
+ {"foo"}, {}, Action{"action_id", {"echo"}, {}}, action_inputs};
+
+ REQUIRE(g.Add({action_desc}));
+ auto const* action_node = g.ActionNodeWithId("action_id");
+ REQUIRE_FALSE(runner.Process(action_node));
+ }
+}
+
+static inline void TestRetrieveOutputDirectories(ApiFactory const& factory,
+ bool /*is_hermetic*/ = true,
+ int /*expected_queued*/ = 0,
+ int /*expected_cached*/ = 0) {
+ SetupConfig();
+ auto tmpdir = GetTestDir();
+
+ auto const make_tree_id = std::string{"make_tree"};
+ auto const* make_tree_cmd =
+ "mkdir -p baz/baz/\n"
+ "touch foo bar\n"
+ "touch baz/foo baz/bar\n"
+ "touch baz/baz/foo baz/baz/bar";
+
+ auto create_action = [&make_tree_id, make_tree_cmd](
+ std::vector<std::string>&& out_files,
+ std::vector<std::string>&& out_dirs) {
+ return ActionDescription{std::move(out_files),
+ std::move(out_dirs),
+ Action{make_tree_id,
+ {"sh", "-c", make_tree_cmd},
+ {{"PATH", "/bin:/usr/bin"}}},
+ {}};
+ };
+
+ SECTION("entire action output as directory") {
+ auto const make_tree_desc = create_action({}, {""});
+ auto const root_desc = ArtifactDescription{make_tree_id, ""};
+
+ DependencyGraph g{};
+ REQUIRE(g.AddAction(make_tree_desc));
+
+ auto const* action = g.ActionNodeWithId(make_tree_id);
+ REQUIRE_FALSE(action == nullptr);
+ auto const* root = g.ArtifactNodeWithId(root_desc.Id());
+ REQUIRE_FALSE(root == nullptr);
+
+ // run action
+ auto api = factory();
+ Executor runner{api.get(), ReadPlatformPropertiesFromEnv()};
+ REQUIRE(runner.Process(action));
+
+ // read output
+ auto root_info = root->Content().Info();
+ REQUIRE(root_info);
+ CHECK(IsTreeObject(root_info->type));
+
+ // retrieve ALL artifacts
+ auto tmpdir = GetTestDir() / "entire_output";
+ REQUIRE(FileSystemManager::CreateDirectory(tmpdir));
+
+ REQUIRE(api->RetrieveToPaths({*root_info}, {tmpdir}));
+ CHECK(FileSystemManager::IsFile(tmpdir / "foo"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "bar"));
+ CHECK(FileSystemManager::IsDirectory(tmpdir / "baz"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "baz" / "foo"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "baz" / "bar"));
+ CHECK(FileSystemManager::IsDirectory(tmpdir / "baz" / "baz"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "baz" / "baz" / "foo"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "baz" / "baz" / "bar"));
+ }
+
+ SECTION("disjoint files and directories") {
+ auto const make_tree_desc = create_action({"foo", "bar"}, {"baz"});
+ auto const foo_desc = ArtifactDescription{make_tree_id, "foo"};
+ auto const bar_desc = ArtifactDescription{make_tree_id, "bar"};
+ auto const baz_desc = ArtifactDescription{make_tree_id, "baz"};
+
+ DependencyGraph g{};
+ REQUIRE(g.AddAction(make_tree_desc));
+
+ auto const* action = g.ActionNodeWithId(make_tree_id);
+ REQUIRE_FALSE(action == nullptr);
+ auto const* foo = g.ArtifactNodeWithId(foo_desc.Id());
+ REQUIRE_FALSE(foo == nullptr);
+ auto const* bar = g.ArtifactNodeWithId(bar_desc.Id());
+ REQUIRE_FALSE(bar == nullptr);
+ auto const* baz = g.ArtifactNodeWithId(baz_desc.Id());
+ REQUIRE_FALSE(baz == nullptr);
+
+ // run action
+ auto api = factory();
+ Executor runner{api.get(), ReadPlatformPropertiesFromEnv()};
+ REQUIRE(runner.Process(action));
+
+ // read output
+ auto foo_info = foo->Content().Info();
+ REQUIRE(foo_info);
+ CHECK(IsFileObject(foo_info->type));
+
+ auto bar_info = bar->Content().Info();
+ REQUIRE(bar_info);
+ CHECK(IsFileObject(bar_info->type));
+
+ auto baz_info = baz->Content().Info();
+ REQUIRE(baz_info);
+ CHECK(IsTreeObject(baz_info->type));
+
+ // retrieve ALL artifacts
+ auto tmpdir = GetTestDir() / "disjoint";
+ REQUIRE(FileSystemManager::CreateDirectory(tmpdir));
+
+ REQUIRE(api->RetrieveToPaths({*foo_info}, {tmpdir / "foo"}));
+ CHECK(FileSystemManager::IsFile(tmpdir / "foo"));
+
+ REQUIRE(api->RetrieveToPaths({*bar_info}, {tmpdir / "bar"}));
+ CHECK(FileSystemManager::IsFile(tmpdir / "bar"));
+
+ REQUIRE(api->RetrieveToPaths({*baz_info}, {tmpdir / "baz"}));
+ CHECK(FileSystemManager::IsDirectory(tmpdir / "baz"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "baz" / "foo"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "baz" / "bar"));
+ CHECK(FileSystemManager::IsDirectory(tmpdir / "baz" / "baz"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "baz" / "baz" / "foo"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "baz" / "baz" / "bar"));
+ }
+
+ SECTION("nested files and directories") {
+ auto const make_tree_desc =
+ create_action({"foo", "baz/bar"}, {"", "baz/baz"});
+ auto const root_desc = ArtifactDescription{make_tree_id, ""};
+ auto const foo_desc = ArtifactDescription{make_tree_id, "foo"};
+ auto const bar_desc = ArtifactDescription{make_tree_id, "baz/bar"};
+ auto const baz_desc = ArtifactDescription{make_tree_id, "baz/baz"};
+
+ DependencyGraph g{};
+ REQUIRE(g.AddAction(make_tree_desc));
+
+ auto const* action = g.ActionNodeWithId(make_tree_id);
+ REQUIRE_FALSE(action == nullptr);
+ auto const* root = g.ArtifactNodeWithId(root_desc.Id());
+ REQUIRE_FALSE(root == nullptr);
+ auto const* foo = g.ArtifactNodeWithId(foo_desc.Id());
+ REQUIRE_FALSE(foo == nullptr);
+ auto const* bar = g.ArtifactNodeWithId(bar_desc.Id());
+ REQUIRE_FALSE(bar == nullptr);
+ auto const* baz = g.ArtifactNodeWithId(baz_desc.Id());
+ REQUIRE_FALSE(baz == nullptr);
+
+ // run action
+ auto api = factory();
+ Executor runner{api.get(), ReadPlatformPropertiesFromEnv()};
+ REQUIRE(runner.Process(action));
+
+ // read output
+ auto root_info = root->Content().Info();
+ REQUIRE(root_info);
+ CHECK(IsTreeObject(root_info->type));
+
+ auto foo_info = foo->Content().Info();
+ REQUIRE(foo_info);
+ CHECK(IsFileObject(foo_info->type));
+
+ auto bar_info = bar->Content().Info();
+ REQUIRE(bar_info);
+ CHECK(IsFileObject(bar_info->type));
+
+ auto baz_info = baz->Content().Info();
+ REQUIRE(baz_info);
+ CHECK(IsTreeObject(baz_info->type));
+
+ // retrieve ALL artifacts
+ auto tmpdir = GetTestDir() / "baz";
+ REQUIRE(FileSystemManager::CreateDirectory(tmpdir));
+
+ REQUIRE(api->RetrieveToPaths({*root_info}, {tmpdir / "root"}));
+ CHECK(FileSystemManager::IsDirectory(tmpdir / "root"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "root" / "foo"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "root" / "bar"));
+ CHECK(FileSystemManager::IsDirectory(tmpdir / "root" / "baz"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "root" / "baz" / "foo"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "root" / "baz" / "bar"));
+ CHECK(FileSystemManager::IsDirectory(tmpdir / "root" / "baz" / "baz"));
+ CHECK(
+ FileSystemManager::IsFile(tmpdir / "root" / "baz" / "baz" / "foo"));
+ CHECK(
+ FileSystemManager::IsFile(tmpdir / "root" / "baz" / "baz" / "bar"));
+
+ REQUIRE(api->RetrieveToPaths({*foo_info}, {tmpdir / "foo"}));
+ CHECK(FileSystemManager::IsFile(tmpdir / "foo"));
+
+ REQUIRE(api->RetrieveToPaths({*bar_info}, {tmpdir / "bar"}));
+ CHECK(FileSystemManager::IsFile(tmpdir / "bar"));
+
+ REQUIRE(api->RetrieveToPaths({*baz_info}, {tmpdir / "baz"}));
+ CHECK(FileSystemManager::IsDirectory(tmpdir / "baz"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "baz" / "foo"));
+ CHECK(FileSystemManager::IsFile(tmpdir / "baz" / "bar"));
+ }
+
+ SECTION("non-existing outputs") {
+ SECTION("non-existing file") {
+ auto const make_tree_desc = create_action({"fool"}, {});
+ auto const fool_desc = ArtifactDescription{make_tree_id, "fool"};
+
+ DependencyGraph g{};
+ REQUIRE(g.AddAction(make_tree_desc));
+
+ auto const* action = g.ActionNodeWithId(make_tree_id);
+ REQUIRE_FALSE(action == nullptr);
+ auto const* fool = g.ArtifactNodeWithId(fool_desc.Id());
+ REQUIRE_FALSE(fool == nullptr);
+
+ // run action
+ auto api = factory();
+ Executor runner{api.get(), ReadPlatformPropertiesFromEnv()};
+ CHECK_FALSE(runner.Process(action));
+ }
+
+ SECTION("non-existing directory") {
+ auto const make_tree_desc = create_action({"bazel"}, {});
+ auto const bazel_desc = ArtifactDescription{make_tree_id, "bazel"};
+
+ DependencyGraph g{};
+ REQUIRE(g.AddAction(make_tree_desc));
+
+ auto const* action = g.ActionNodeWithId(make_tree_id);
+ REQUIRE_FALSE(action == nullptr);
+ auto const* bazel = g.ArtifactNodeWithId(bazel_desc.Id());
+ REQUIRE_FALSE(bazel == nullptr);
+
+ // run action
+ auto api = factory();
+ Executor runner{api.get(), ReadPlatformPropertiesFromEnv()};
+ CHECK_FALSE(runner.Process(action));
+ }
+ }
+}
+
+#endif // INCLUDED_SRC_TEST_BUILDTOOL_EXECUTION_ENGINE_EXECUTOR_EXECUTOR_API_TEST_HPP
diff --git a/test/buildtool/execution_engine/executor/executor_api_local.test.cpp b/test/buildtool/execution_engine/executor/executor_api_local.test.cpp
new file mode 100755
index 00000000..955e1682
--- /dev/null
+++ b/test/buildtool/execution_engine/executor/executor_api_local.test.cpp
@@ -0,0 +1,36 @@
+#include "catch2/catch.hpp"
+#include "src/buildtool/execution_api/local/local_api.hpp"
+#include "src/buildtool/execution_api/remote/config.hpp"
+#include "src/buildtool/execution_engine/executor/executor.hpp"
+#include "test/buildtool/execution_engine/executor/executor_api.test.hpp"
+#include "test/utils/hermeticity/local.hpp"
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "Executor<LocalApi>: Upload blob",
+ "[executor]") {
+ TestBlobUpload([&] { return std::make_unique<LocalApi>(); });
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "Executor<LocalApi>: Compile hello world",
+ "[executor]") {
+ TestHelloWorldCompilation([&] { return std::make_unique<LocalApi>(); });
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "Executor<LocalApi>: Compile greeter",
+ "[executor]") {
+ TestGreeterCompilation([&] { return std::make_unique<LocalApi>(); });
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "Executor<LocalApi>: Upload and download trees",
+ "[executor]") {
+ TestUploadAndDownloadTrees([&] { return std::make_unique<LocalApi>(); });
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "Executor<LocalApi>: Retrieve output directories",
+ "[executor]") {
+ TestRetrieveOutputDirectories([&] { return std::make_unique<LocalApi>(); });
+}
diff --git a/test/buildtool/execution_engine/executor/executor_api_remote_bazel.test.cpp b/test/buildtool/execution_engine/executor/executor_api_remote_bazel.test.cpp
new file mode 100755
index 00000000..d6ad57b1
--- /dev/null
+++ b/test/buildtool/execution_engine/executor/executor_api_remote_bazel.test.cpp
@@ -0,0 +1,71 @@
+#include "catch2/catch.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_api.hpp"
+#include "src/buildtool/execution_api/remote/config.hpp"
+#include "src/buildtool/execution_engine/executor/executor.hpp"
+#include "test/buildtool/execution_engine/executor/executor_api.test.hpp"
+
+TEST_CASE("Executor<BazelApi>: Upload blob", "[executor]") {
+ ExecutionConfiguration config;
+ auto const& info = RemoteExecutionConfig::Instance();
+
+ TestBlobUpload([&] {
+ return BazelApi::Ptr{
+ new BazelApi{"remote-execution", info.Host(), info.Port(), config}};
+ });
+}
+
+TEST_CASE("Executor<BazelApi>: Compile hello world", "[executor]") {
+ ExecutionConfiguration config;
+ config.skip_cache_lookup = false;
+
+ auto const& info = RemoteExecutionConfig::Instance();
+
+ TestHelloWorldCompilation(
+ [&] {
+ return BazelApi::Ptr{new BazelApi{
+ "remote-execution", info.Host(), info.Port(), config}};
+ },
+ false /* not hermetic */);
+}
+
+TEST_CASE("Executor<BazelApi>: Compile greeter", "[executor]") {
+ ExecutionConfiguration config;
+ config.skip_cache_lookup = false;
+
+ auto const& info = RemoteExecutionConfig::Instance();
+
+ TestGreeterCompilation(
+ [&] {
+ return BazelApi::Ptr{new BazelApi{
+ "remote-execution", info.Host(), info.Port(), config}};
+ },
+ false /* not hermetic */);
+}
+
+TEST_CASE("Executor<BazelApi>: Upload and download trees", "[executor]") {
+ ExecutionConfiguration config;
+ config.skip_cache_lookup = false;
+
+ auto const& info = RemoteExecutionConfig::Instance();
+
+ TestUploadAndDownloadTrees(
+ [&] {
+ return BazelApi::Ptr{new BazelApi{
+ "remote-execution", info.Host(), info.Port(), config}};
+ },
+ false /* not hermetic */);
+}
+
+TEST_CASE("Executor<BazelApi>: Retrieve output directories", "[executor]") {
+ ExecutionConfiguration config;
+ config.skip_cache_lookup = false;
+
+ auto const& info = RemoteExecutionConfig::Instance();
+
+ TestRetrieveOutputDirectories(
+ [&] {
+ return BazelApi::Ptr{new BazelApi{
+ "remote-execution", info.Host(), info.Port(), config}};
+ },
+ false /* not hermetic */);
+}
diff --git a/test/buildtool/execution_engine/traverser/TARGETS b/test/buildtool/execution_engine/traverser/TARGETS
new file mode 100644
index 00000000..6a84a91c
--- /dev/null
+++ b/test/buildtool/execution_engine/traverser/TARGETS
@@ -0,0 +1,16 @@
+{ "traverser":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["traverser"]
+ , "srcs": ["traverser.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["test/utils", "container_matchers"]
+ , ["src/buildtool/common", "artifact_factory"]
+ , ["src/buildtool/execution_engine/dag", "dag"]
+ , ["src/buildtool/execution_engine/traverser", "traverser"]
+ ]
+ , "stage": ["test", "buildtool", "execution_engine", "traverser"]
+ }
+, "TESTS": {"type": "install", "tainted": ["test"], "deps": ["traverser"]}
+} \ No newline at end of file
diff --git a/test/buildtool/execution_engine/traverser/traverser.test.cpp b/test/buildtool/execution_engine/traverser/traverser.test.cpp
new file mode 100644
index 00000000..63d1fad6
--- /dev/null
+++ b/test/buildtool/execution_engine/traverser/traverser.test.cpp
@@ -0,0 +1,836 @@
+#include <algorithm>
+#include <mutex>
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+#include <variant>
+#include <vector>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/common/artifact_factory.hpp"
+#include "src/buildtool/execution_engine/dag/dag.hpp"
+#include "src/buildtool/execution_engine/traverser/traverser.hpp"
+#include "test/utils/container_matchers.hpp"
+
+namespace {
+
+auto const kNumJobs = std::max(1U, std::thread::hardware_concurrency());
+
+class TestBuildInfo {
+ public:
+ [[nodiscard]] auto CorrectlyBuilt() const noexcept
+ -> std::unordered_set<ArtifactIdentifier> {
+ return correctly_built_;
+ }
+
+ [[nodiscard]] auto IncorrectlyBuilt() const noexcept
+ -> std::unordered_set<ArtifactIdentifier> {
+ return incorrectly_built_;
+ }
+
+ [[nodiscard]] auto ArtifactsUploaded() const noexcept
+ -> std::unordered_set<ArtifactIdentifier> {
+ return artifacts_uploaded_;
+ }
+
+ [[nodiscard]] auto WasUploadRepeated() noexcept -> bool {
+ return not uploaded_more_than_once_.empty();
+ }
+
+ [[nodiscard]] auto Name() const noexcept -> std::string { return name_; }
+
+ void SetName(std::string const& name) noexcept {
+ std::lock_guard lock{mutex_};
+ name_ = name;
+ }
+
+ void SetName(std::string&& name) noexcept {
+ std::lock_guard lock{mutex_};
+ name_ = std::move(name);
+ }
+
+ [[nodiscard]] auto InsertCorrectlyBuilt(
+ ArtifactIdentifier const& artifact_id) -> bool {
+ std::lock_guard lock{mutex_};
+ auto const [_, first_time_added] = correctly_built_.insert(artifact_id);
+ return first_time_added;
+ }
+
+ [[nodiscard]] auto InsertIncorrectlyBuilt(
+ ArtifactIdentifier const& artifact_id) -> bool {
+ std::lock_guard lock{mutex_};
+ auto const [_, first_time_added] =
+ incorrectly_built_.insert(artifact_id);
+ return first_time_added;
+ }
+
+ auto InsertArtifactUploaded(ArtifactIdentifier const& artifact_id) -> bool {
+ std::lock_guard lock{mutex_};
+ auto const [_, first_time_added] =
+ artifacts_uploaded_.insert(artifact_id);
+ if (not first_time_added) {
+ uploaded_more_than_once_.insert(artifact_id);
+ }
+ return true;
+ }
+
+ private:
+ std::unordered_set<ArtifactIdentifier> correctly_built_{};
+ std::unordered_set<ArtifactIdentifier> incorrectly_built_{};
+ std::unordered_set<ArtifactIdentifier> artifacts_uploaded_{};
+ std::unordered_set<ArtifactIdentifier> uploaded_more_than_once_{};
+ std::string name_{};
+ std::mutex mutex_;
+};
+
+class TestExecutor {
+ public:
+ explicit TestExecutor(TestBuildInfo* info) noexcept
+ : name_{info->Name()}, build_info_{info} {}
+
+ [[nodiscard]] auto Process(
+ gsl::not_null<DependencyGraph::ActionNode const*> const& action)
+ const noexcept -> bool {
+ try {
+ build_info_->SetName(name_);
+ bool const all_deps_available = AllAvailable(action->Children());
+ if (all_deps_available) {
+ for (auto const& [name, node] : action->OutputFiles()) {
+ if (not build_info_->InsertCorrectlyBuilt(
+ node->Content().Id())) {
+ [[maybe_unused]] auto was_it_added =
+ build_info_->InsertIncorrectlyBuilt(
+ node->Content().Id());
+ return false;
+ }
+ }
+ return true;
+ }
+ for (auto const& [name, node] : action->OutputFiles()) {
+ [[maybe_unused]] auto was_it_added =
+ build_info_->InsertIncorrectlyBuilt(node->Content().Id());
+ }
+ } catch (...) {
+ }
+ return false;
+ }
+
+ [[nodiscard]] auto Process(
+ gsl::not_null<DependencyGraph::ArtifactNode const*> const& artifact)
+ const noexcept -> bool {
+ try {
+ build_info_->InsertArtifactUploaded(artifact->Content().Id());
+ } catch (...) {
+ return false;
+ }
+ return true;
+ }
+
+ private:
+ std::string const name_;
+ TestBuildInfo* build_info_;
+
+ template <typename Container>
+ [[nodiscard]] auto AllAvailable(Container&& c) const noexcept -> bool {
+ return std::all_of(std::begin(c), std::end(c), [](auto node) {
+ return node->TraversalState()->IsAvailable();
+ });
+ }
+};
+
+// Class to simplify the writing of tests, checking that no outputs are repeated
+// and keeping track of what needs to be built
+class TestProject {
+ public:
+ auto AddOutputInputPair(std::string const& action_id,
+ std::vector<std::string> const& outputs,
+ std::vector<nlohmann::json> const& inputs) -> bool {
+ std::vector<std::string> command;
+ command.emplace_back("BUILD");
+ for (auto const& output : outputs) {
+ command.push_back(output);
+ auto const out_id = ArtifactDescription{
+ action_id,
+ std::filesystem::path{
+ output}}.Id();
+ auto [_, is_inserted] = artifacts_to_be_built_.insert(out_id);
+ if (!is_inserted) {
+ return false;
+ }
+ }
+ auto inputs_desc = ActionDescription::inputs_t{};
+ if (!inputs.empty()) {
+ command.emplace_back("FROM");
+ for (auto const& input_desc : inputs) {
+ auto artifact = ArtifactDescription::FromJson(input_desc);
+ REQUIRE(artifact);
+ auto const input_id = artifact->Id();
+ command.push_back(input_id);
+ inputs_desc.emplace(input_id, *artifact);
+ if (ArtifactFactory::IsLocal(input_desc)) {
+ local_artifacts_.insert(input_id);
+ }
+ }
+ }
+ graph_full_description_.emplace_back(ActionDescription{
+ outputs, {}, Action{action_id, command, {}}, inputs_desc});
+ return true;
+ }
+
+ auto FillGraph(gsl::not_null<DependencyGraph*> const& g) -> bool {
+ return g->Add(graph_full_description_);
+ }
+
+ [[nodiscard]] auto ArtifactsToBeBuilt() const noexcept
+ -> std::unordered_set<ArtifactIdentifier> {
+ return artifacts_to_be_built_;
+ }
+
+ [[nodiscard]] auto LocalArtifacts() const noexcept
+ -> std::unordered_set<ArtifactIdentifier> {
+ return local_artifacts_;
+ }
+
+ private:
+ std::vector<ActionDescription> graph_full_description_{};
+ std::unordered_set<ArtifactIdentifier> artifacts_to_be_built_{};
+ std::unordered_set<ArtifactIdentifier> local_artifacts_{};
+};
+
+} // namespace
+
+TEST_CASE("Executable", "[traverser]") {
+ TestProject p;
+ CHECK(p.AddOutputInputPair(
+ "action",
+ {"executable"},
+ {ArtifactFactory::DescribeLocalArtifact("main.cpp", "")}));
+ DependencyGraph g;
+ CHECK(p.FillGraph(&g));
+ TestBuildInfo build_info;
+ std::string name = "This is a long name that shouldn't be corrupted";
+ build_info.SetName(name);
+ SECTION("Traverse()") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ CHECK(traverser.Traverse());
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+ SECTION("Traverse(executable)") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+
+ auto const exec_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeActionArtifact("action",
+ "executable"));
+ auto const traversed = traverser.Traverse({exec_id});
+ CHECK(traversed);
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+}
+
+TEST_CASE("Executable depends on library", "[traverser]") {
+ TestProject p;
+ CHECK(p.AddOutputInputPair(
+ "make_exe",
+ {"executable"},
+ {ArtifactFactory::DescribeLocalArtifact("main.cpp", "repo"),
+ ArtifactFactory::DescribeActionArtifact("make_lib", "library")}));
+ CHECK(p.AddOutputInputPair(
+ "make_lib",
+ {"library"},
+ {ArtifactFactory::DescribeLocalArtifact("library.hpp", "repo"),
+ ArtifactFactory::DescribeLocalArtifact("library.cpp", "repo")}));
+ DependencyGraph g;
+ CHECK(p.FillGraph(&g));
+ TestBuildInfo build_info;
+ std::string name = "This is a long name that shouldn't be corrupted";
+ build_info.SetName(name);
+ SECTION("Full build (without specifying artifacts)") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ CHECK(traverser.Traverse());
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+ SECTION("Full build (executable)") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ auto const exec_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeActionArtifact("make_exe",
+ "executable"));
+ CHECK(traverser.Traverse({exec_id}));
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+ SECTION("Only build library") {
+ auto const lib_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeActionArtifact("make_lib", "library"));
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ CHECK(traverser.Traverse({lib_id}));
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ {lib_id}));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ auto const lib_cpp_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeLocalArtifact("library.cpp", "repo"));
+ auto const lib_hpp_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeLocalArtifact("library.hpp", "repo"));
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ {lib_cpp_id, lib_hpp_id}));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+}
+
+TEST_CASE("Two artifacts depend on another", "[traverser]") {
+ TestProject p;
+ auto const dep_desc =
+ ArtifactFactory::DescribeActionArtifact("make_dep", "dep");
+ auto const dep_id = ArtifactFactory::Identifier(dep_desc);
+ CHECK(p.AddOutputInputPair("action1", {"toplevel1"}, {dep_desc}));
+ CHECK(p.AddOutputInputPair("action2", {"toplevel2"}, {dep_desc}));
+ CHECK(p.AddOutputInputPair(
+ "make_dep",
+ {"dep"},
+ {ArtifactFactory::DescribeLocalArtifact("leaf1", "repo"),
+ ArtifactFactory::DescribeLocalArtifact("leaf2", "repo")}));
+ DependencyGraph g;
+ CHECK(p.FillGraph(&g));
+ TestBuildInfo build_info;
+ std::string name = "This is a long name that shouldn't be corrupted";
+ build_info.SetName(name);
+ SECTION("Full build") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ CHECK(traverser.Traverse());
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+ SECTION("Only specified top-level artifact is built") {
+ auto const toplevel1_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeActionArtifact("action1", "toplevel1"));
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ CHECK(traverser.Traverse({toplevel1_id}));
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ {toplevel1_id, dep_id}));
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK(build_info.Name() == name);
+ }
+}
+
+TEST_CASE("Action with two outputs, no deps", "[traverser]") {
+ TestProject p;
+ CHECK(p.AddOutputInputPair("make_outputs", {"output1", "output2"}, {}));
+ auto const output1_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeActionArtifact("make_outputs", "output1"));
+ auto const output2_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeActionArtifact("make_outputs", "output2"));
+ DependencyGraph g;
+ CHECK(p.FillGraph(&g));
+ TestBuildInfo build_info;
+ std::string name = "This is a long name that shouldn't be corrupted";
+ build_info.SetName(name);
+ SECTION("Traverse()") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ CHECK(traverser.Traverse());
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+ SECTION("Traverse(output1)") {
+
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ auto const traversed = traverser.Traverse({output1_id});
+ CHECK(traversed);
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+ SECTION("Traverse(output1, output2)") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ auto const traversed = traverser.Traverse({output1_id, output2_id});
+ CHECK(traversed);
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+}
+
+TEST_CASE("Action with two outputs, one dep", "[traverser]") {
+ TestProject p;
+ CHECK(p.AddOutputInputPair(
+ "make_outputs",
+ {"output1", "output2"},
+ {ArtifactFactory::DescribeLocalArtifact("dep", "repo")}));
+ auto const output1_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeActionArtifact("make_outputs", "output1"));
+ auto const output2_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeActionArtifact("make_outputs", "output2"));
+ DependencyGraph g;
+ CHECK(p.FillGraph(&g));
+ TestBuildInfo build_info;
+ std::string name = "This is a long name that shouldn't be corrupted";
+ build_info.SetName(name);
+ SECTION("Traverse()") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ CHECK(traverser.Traverse());
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+ SECTION("Traverse(output1)") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ auto const traversed = traverser.Traverse({output1_id});
+ CHECK(traversed);
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+ SECTION("Traverse(output1, output2)") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ auto const traversed = traverser.Traverse({output1_id, output2_id});
+ CHECK(traversed);
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+ SECTION("Traverse(dep, output2)") {
+ auto const dep_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeLocalArtifact("dep", "repo"));
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ auto const traversed = traverser.Traverse({dep_id, output2_id});
+ CHECK(traversed);
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+}
+
+TEST_CASE("Action with two outputs, actions depend on each of outputs",
+ "[traverser]") {
+ TestProject p;
+ CHECK(p.AddOutputInputPair("make_outputs", {"output1", "output2"}, {}));
+ auto const output1_desc =
+ ArtifactFactory::DescribeActionArtifact("make_outputs", "output1");
+ auto const output1_id = ArtifactFactory::Identifier(output1_desc);
+ auto const output2_desc =
+ ArtifactFactory::DescribeActionArtifact("make_outputs", "output2");
+ auto const output2_id = ArtifactFactory::Identifier(output2_desc);
+
+ CHECK(p.AddOutputInputPair("consumer1", {"exec1"}, {output1_desc}));
+ auto const exec1_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeActionArtifact("consumer1", "exec1"));
+
+ CHECK(p.AddOutputInputPair("consumer2", {"exec2"}, {output2_desc}));
+ auto const exec2_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeActionArtifact("consumer2", "exec2"));
+
+ DependencyGraph g;
+ CHECK(p.FillGraph(&g));
+ TestBuildInfo build_info;
+ std::string name = "This is a long name that shouldn't be corrupted";
+ build_info.SetName(name);
+ SECTION("Traverse()") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ CHECK(traverser.Traverse());
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+ SECTION("Traverse(exec1)") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ auto const traversed = traverser.Traverse({exec1_id});
+ CHECK(traversed);
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ {exec1_id, output1_id, output2_id}));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+ SECTION("Traverse(exec2, output1)") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ auto const traversed = traverser.Traverse({output1_id, exec2_id});
+ CHECK(traversed);
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ {exec2_id, output1_id, output2_id}));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+ SECTION("Traverse(exec1, exec2)") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ auto const traversed = traverser.Traverse({exec1_id, exec2_id});
+ CHECK(traversed);
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+}
+
+TEST_CASE("lib2 depends on lib1, executable depends on lib1 and lib2") {
+ TestProject p;
+ auto const lib1_desc =
+ ArtifactFactory::DescribeActionArtifact("make_lib1", "lib1");
+ auto const lib1_id = ArtifactFactory::Identifier(lib1_desc);
+
+ auto const lib2_desc =
+ ArtifactFactory::DescribeActionArtifact("make_lib2", "lib2");
+ auto const lib2_id = ArtifactFactory::Identifier(lib2_desc);
+
+ auto const exec_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeActionArtifact("make_exe", "executable"));
+
+ CHECK(p.AddOutputInputPair(
+ "make_exe",
+ {"executable"},
+ {ArtifactFactory::DescribeLocalArtifact("main.cpp", "repo"),
+ lib1_desc,
+ lib2_desc}));
+
+ CHECK(p.AddOutputInputPair(
+ "make_lib1",
+ {"lib1"},
+ {ArtifactFactory::DescribeLocalArtifact("lib1.hpp", "repo"),
+ ArtifactFactory::DescribeLocalArtifact("lib1.cpp", "repo")}));
+ CHECK(p.AddOutputInputPair(
+ "make_lib2",
+ {"lib2"},
+ {lib1_desc,
+ ArtifactFactory::DescribeLocalArtifact("lib2.hpp", "repo"),
+ ArtifactFactory::DescribeLocalArtifact("lib2.cpp", "repo")}));
+
+ DependencyGraph g;
+ CHECK(p.FillGraph(&g));
+ TestBuildInfo build_info;
+ std::string name = "This is a long name that shouldn't be corrupted ";
+ build_info.SetName(name);
+ SECTION(" Full build(without specifying artifacts) ") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ CHECK(traverser.Traverse());
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+ SECTION("Full build (executable)") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ CHECK(traverser.Traverse({exec_id}));
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+ SECTION("Full build (executable + lib1)") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ CHECK(traverser.Traverse({exec_id, lib1_id}));
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+ SECTION("Full build (executable + lib2)") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ CHECK(traverser.Traverse({exec_id, lib2_id}));
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+ SECTION("Full build (executable + lib1 + lib2)") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ CHECK(traverser.Traverse({exec_id, lib1_id, lib2_id}));
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+ SECTION("First call does not build all artifacts") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ CHECK(traverser.Traverse({lib1_id}));
+ CHECK(traverser.Traverse({exec_id}));
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.ArtifactsToBeBuilt()));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ p.LocalArtifacts()));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+
+ SECTION(
+ "Traverse(lib2), executable is not built even if lib1 would notify its "
+ "action") {
+ {
+ TestExecutor runner{&build_info};
+ Traverser traverser(runner, g, kNumJobs);
+ CHECK(traverser.Traverse({lib2_id}));
+ }
+ CHECK_THAT(
+ build_info.CorrectlyBuilt(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ {lib1_id, lib2_id}));
+ CHECK(build_info.IncorrectlyBuilt().empty());
+ auto const lib1_hpp_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeLocalArtifact("lib1.hpp", "repo"));
+ auto const lib1_cpp_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeLocalArtifact("lib1.cpp", "repo"));
+ auto const lib2_hpp_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeLocalArtifact("lib2.hpp", "repo"));
+ auto const lib2_cpp_id = ArtifactFactory::Identifier(
+ ArtifactFactory::DescribeLocalArtifact("lib2.cpp", "repo"));
+ CHECK_THAT(
+ build_info.ArtifactsUploaded(),
+ HasSameUniqueElementsAs<std::unordered_set<ArtifactIdentifier>>(
+ {lib1_hpp_id, lib1_cpp_id, lib2_hpp_id, lib2_cpp_id}));
+ CHECK_FALSE(build_info.WasUploadRepeated());
+ CHECK(build_info.Name() == name);
+ }
+}
diff --git a/test/buildtool/file_system/TARGETS b/test/buildtool/file_system/TARGETS
new file mode 100644
index 00000000..fdb2296d
--- /dev/null
+++ b/test/buildtool/file_system/TARGETS
@@ -0,0 +1,62 @@
+{ "file_system_manager":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["file_system_manager"]
+ , "srcs": ["file_system_manager.test.cpp"]
+ , "data": ["test_data"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/file_system", "file_system_manager"]
+ ]
+ , "stage": ["test", "buildtool", "file_system"]
+ }
+, "system_command":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["system_command"]
+ , "srcs": ["system_command.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/file_system", "system_command"]
+ ]
+ , "stage": ["test", "buildtool", "file_system"]
+ }
+, "git_tree":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["git_tree"]
+ , "srcs": ["git_tree.test.cpp"]
+ , "data": ["test_data"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["test/utils", "container_matchers"]
+ , ["src/buildtool/file_system", "git_tree"]
+ ]
+ , "stage": ["test", "buildtool", "file_system"]
+ }
+, "file_root":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["file_root"]
+ , "srcs": ["file_root.test.cpp"]
+ , "data": ["test_data"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["test/utils", "container_matchers"]
+ , ["src/buildtool/common", "artifact_description"]
+ , ["src/buildtool/file_system", "file_root"]
+ ]
+ , "stage": ["test", "buildtool", "file_system"]
+ }
+, "test_data":
+ { "type": ["@", "rules", "data", "staged"]
+ , "srcs":
+ ["data/empty_executable", "data/example_file", "data/test_repo.bundle"]
+ , "stage": ["test", "buildtool", "file_system"]
+ }
+, "TESTS":
+ { "type": "install"
+ , "tainted": ["test"]
+ , "deps": ["file_root", "file_system_manager", "git_tree", "system_command"]
+ }
+} \ No newline at end of file
diff --git a/test/buildtool/file_system/data/empty_executable b/test/buildtool/file_system/data/empty_executable
new file mode 100755
index 00000000..e69de29b
--- /dev/null
+++ b/test/buildtool/file_system/data/empty_executable
diff --git a/test/buildtool/file_system/data/example_file b/test/buildtool/file_system/data/example_file
new file mode 100644
index 00000000..4acf82b6
--- /dev/null
+++ b/test/buildtool/file_system/data/example_file
@@ -0,0 +1,3 @@
+First line
+Line 2
+Last line with content
diff --git a/test/buildtool/file_system/data/test_repo.bundle b/test/buildtool/file_system/data/test_repo.bundle
new file mode 100644
index 00000000..7bf3c5a0
--- /dev/null
+++ b/test/buildtool/file_system/data/test_repo.bundle
Binary files differ
diff --git a/test/buildtool/file_system/file_root.test.cpp b/test/buildtool/file_system/file_root.test.cpp
new file mode 100644
index 00000000..a934161b
--- /dev/null
+++ b/test/buildtool/file_system/file_root.test.cpp
@@ -0,0 +1,224 @@
+#include <thread>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/common/artifact_description.hpp"
+#include "src/buildtool/file_system/file_root.hpp"
+#include "test/utils/container_matchers.hpp"
+
+namespace {
+
+auto const kBundlePath =
+ std::string{"test/buildtool/file_system/data/test_repo.bundle"};
+auto const kTreeId = std::string{"e51a219a27b672ccf17abec7d61eb4d6e0424140"};
+auto const kFooId = std::string{"19102815663d23f8b75a47e7a01965dcdc96468c"};
+auto const kBarId = std::string{"ba0e162e1c47469e3fe4b393a8bf8c569f302116"};
+
+[[nodiscard]] auto GetTestDir() -> std::filesystem::path {
+ auto* tmp_dir = std::getenv("TEST_TMPDIR");
+ if (tmp_dir != nullptr) {
+ return tmp_dir;
+ }
+ return FileSystemManager::GetCurrentDirectory() /
+ "test/buildtool/file_system";
+}
+
+[[nodiscard]] auto CreateTestRepo(bool do_checkout = false)
+ -> std::optional<std::filesystem::path> {
+ auto repo_path = GetTestDir() / "test_repo" /
+ std::filesystem::path{std::tmpnam(nullptr)}.filename();
+ auto cmd = fmt::format("git clone {}{} {}",
+ do_checkout ? "--branch master " : "",
+ kBundlePath,
+ repo_path.string());
+ if (std::system(cmd.c_str()) == 0) {
+ return repo_path;
+ }
+ return std::nullopt;
+}
+
+void TestFileRootReadFile(FileRoot const& root) {
+ REQUIRE(root.Exists("foo"));
+ REQUIRE(root.IsFile("foo"));
+ auto foo = root.ReadFile("foo");
+ REQUIRE(foo);
+ CHECK(*foo == "foo");
+
+ REQUIRE(root.Exists("bar"));
+ REQUIRE(root.IsFile("bar"));
+ auto bar = root.ReadFile("bar");
+ REQUIRE(bar);
+ CHECK(*bar == "bar");
+
+ REQUIRE(root.Exists("baz"));
+ CHECK_FALSE(root.IsFile("baz"));
+}
+
+void TestFileRootReadEntries(FileRoot const& root,
+ std::string const& path,
+ bool has_baz) {
+ REQUIRE(root.Exists(path));
+ REQUIRE(root.IsDirectory(path));
+ auto entries = root.ReadDirectory(path);
+
+ CHECK_FALSE(entries.Empty());
+ CHECK(entries.Contains("foo"));
+ CHECK(entries.Contains("bar"));
+ if (has_baz) {
+ CHECK(entries.Contains("baz"));
+ }
+ CHECK_FALSE(entries.Contains("does_not_exist"));
+}
+
+void TestFileRootReadDirectory(FileRoot const& root) {
+ TestFileRootReadEntries(root, ".", true);
+ TestFileRootReadEntries(root, "baz", true);
+ TestFileRootReadEntries(root, "baz/baz", false);
+}
+
+void TestFileRootReadFileType(FileRoot const& root) {
+ auto foo_type = root.FileType("baz/foo");
+ REQUIRE(foo_type);
+ CHECK(*foo_type == ObjectType::File);
+
+ auto bar_type = root.FileType("baz/baz/bar");
+ REQUIRE(bar_type);
+ CHECK(*bar_type == ObjectType::Executable);
+
+ CHECK_FALSE(root.FileType("baz"));
+ CHECK_FALSE(root.FileType("does_not_exist"));
+}
+
+} // namespace
+
+TEST_CASE("Creating file root", "[file_root]") {
+ SECTION("local root") {
+ auto root_path = CreateTestRepo(true);
+ REQUIRE(root_path);
+
+ CHECK(FileRoot{*root_path}.Exists("."));
+ CHECK_FALSE(FileRoot{"does_not_exist"}.Exists("."));
+ }
+
+ SECTION("git root") {
+ auto repo_path = CreateTestRepo(false);
+ REQUIRE(repo_path);
+
+ auto root = FileRoot::FromGit(*repo_path, kTreeId);
+ REQUIRE(root);
+ CHECK(root->Exists("."));
+
+ CHECK_FALSE(FileRoot::FromGit("does_not_exist", kTreeId));
+ }
+}
+
+TEST_CASE("Reading files", "[file_root]") {
+ SECTION("local root") {
+ auto root_path = CreateTestRepo(true);
+ REQUIRE(root_path);
+
+ TestFileRootReadFile(FileRoot{*root_path});
+ }
+
+ SECTION("git root") {
+ auto repo_path = CreateTestRepo(false);
+ REQUIRE(repo_path);
+ auto root = FileRoot::FromGit(*repo_path, kTreeId);
+ REQUIRE(root);
+
+ TestFileRootReadFile(*root);
+ }
+}
+
+TEST_CASE("Reading directories", "[file_root]") {
+ SECTION("local root") {
+ auto root_path = CreateTestRepo(true);
+ REQUIRE(root_path);
+
+ TestFileRootReadDirectory(FileRoot{*root_path});
+ }
+
+ SECTION("git root") {
+ auto repo_path = CreateTestRepo(false);
+ REQUIRE(repo_path);
+ auto root = FileRoot::FromGit(*repo_path, kTreeId);
+ REQUIRE(root);
+
+ TestFileRootReadDirectory(*root);
+ }
+}
+
+TEST_CASE("Reading blobs", "[file_root]") {
+ SECTION("local root") {
+ auto root_path = CreateTestRepo(true);
+ REQUIRE(root_path);
+
+ CHECK_FALSE(FileRoot{*root_path}.ReadBlob(kFooId));
+ }
+
+ SECTION("git root") {
+ auto repo_path = CreateTestRepo(false);
+ REQUIRE(repo_path);
+ auto root = FileRoot::FromGit(*repo_path, kTreeId);
+ REQUIRE(root);
+
+ auto foo = root->ReadBlob(kFooId);
+ REQUIRE(foo);
+ CHECK(*foo == "foo");
+
+ CHECK_FALSE(root->ReadBlob("does_not_exist"));
+ }
+}
+
+TEST_CASE("Reading file type", "[file_root]") {
+ SECTION("local root") {
+ auto root_path = CreateTestRepo(true);
+ REQUIRE(root_path);
+
+ TestFileRootReadFileType(FileRoot{*root_path});
+ }
+
+ SECTION("git root") {
+ auto repo_path = CreateTestRepo(false);
+ REQUIRE(repo_path);
+ auto root = FileRoot::FromGit(*repo_path, kTreeId);
+ REQUIRE(root);
+
+ TestFileRootReadFileType(*root);
+ }
+}
+
+TEST_CASE("Creating artifact descriptions", "[file_root]") {
+ SECTION("local root") {
+ auto root_path = CreateTestRepo(true);
+ REQUIRE(root_path);
+ auto root = FileRoot{*root_path};
+
+ auto desc = root.ToArtifactDescription("baz/foo", "repo");
+ REQUIRE(desc);
+ CHECK(*desc ==
+ ArtifactDescription(std::filesystem::path{"baz/foo"}, "repo"));
+
+ CHECK(root.ToArtifactDescription("does_not_exist", "repo"));
+ }
+
+ SECTION("git root") {
+ auto repo_path = CreateTestRepo(false);
+ REQUIRE(repo_path);
+ auto root = FileRoot::FromGit(*repo_path, kTreeId);
+ REQUIRE(root);
+
+ auto foo = root->ToArtifactDescription("baz/foo", "repo");
+ REQUIRE(foo);
+ CHECK(*foo == ArtifactDescription{
+ ArtifactDigest{kFooId, 3}, ObjectType::File, "repo"});
+
+ auto bar = root->ToArtifactDescription("baz/baz/bar", "repo");
+ REQUIRE(bar);
+ CHECK(*bar == ArtifactDescription{ArtifactDigest{kBarId, 3},
+ ObjectType::Executable,
+ "repo"});
+
+ CHECK_FALSE(root->ToArtifactDescription("baz", "repo"));
+ CHECK_FALSE(root->ToArtifactDescription("does_not_exist", "repo"));
+ }
+}
diff --git a/test/buildtool/file_system/file_system_manager.test.cpp b/test/buildtool/file_system/file_system_manager.test.cpp
new file mode 100644
index 00000000..5444a94b
--- /dev/null
+++ b/test/buildtool/file_system/file_system_manager.test.cpp
@@ -0,0 +1,346 @@
+#include <algorithm>
+#include <cstdlib>
+#include <filesystem>
+#include <iostream>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+
+class CopyFileFixture {
+ public:
+ CopyFileFixture() noexcept {
+ REQUIRE(FileSystemManager::CreateDirectory(to_.parent_path()));
+ }
+ CopyFileFixture(CopyFileFixture const&) = delete;
+ CopyFileFixture(CopyFileFixture&&) = delete;
+ ~CopyFileFixture() noexcept { CHECK(std::filesystem::remove(to_)); }
+ auto operator=(CopyFileFixture const&) -> CopyFileFixture& = delete;
+ auto operator=(CopyFileFixture &&) -> CopyFileFixture& = delete;
+
+ std::filesystem::path const from_{
+ "test/buildtool/file_system/data/example_file"};
+ std::filesystem::path const to_{"./tmp-CopyFile/copied_file"};
+};
+
+class WriteFileFixture {
+ public:
+ WriteFileFixture() noexcept {
+ REQUIRE(FileSystemManager::CreateDirectory(root_dir_));
+ }
+ WriteFileFixture(WriteFileFixture const&) = delete;
+ WriteFileFixture(WriteFileFixture&&) = delete;
+ ~WriteFileFixture() noexcept { CHECK(std::filesystem::remove(file_path_)); }
+ auto operator=(WriteFileFixture const&) -> WriteFileFixture& = delete;
+ auto operator=(WriteFileFixture &&) -> WriteFileFixture& = delete;
+
+ std::filesystem::path const relative_path_parent_{
+ GENERATE(as<std::filesystem::path>{},
+ ".",
+ "level0",
+ "level0/level1",
+ "a/b/c/d",
+ "./a/../e")};
+ std::filesystem::path const root_dir_{"./tmp-RemoveFile"};
+ std::filesystem::path const file_path_{root_dir_ / relative_path_parent_ /
+ "file"};
+};
+
+TEST_CASE("CreateDirectory", "[file_system]") {
+ auto const dir = GENERATE(as<std::filesystem::path>{},
+ "level0",
+ "level0/level1",
+ "a/b/c/d",
+ "./a/../e");
+ CHECK(FileSystemManager::CreateDirectory(dir));
+ CHECK(std::filesystem::exists(dir));
+ CHECK(std::filesystem::is_directory(dir));
+
+ // If we have created the directory already, CreateDirectory() returns true
+ // and the state of things doesn't change
+ CHECK(FileSystemManager::CreateDirectory(dir));
+ CHECK(std::filesystem::exists(dir));
+ CHECK(std::filesystem::is_directory(dir));
+}
+
+TEST_CASE("IsFile", "[file_system]") {
+ CHECK(FileSystemManager::IsFile(
+ "test/buildtool/file_system/data/example_file"));
+ CHECK(FileSystemManager::IsFile(
+ "test/buildtool/file_system/data/empty_executable"));
+ CHECK_FALSE(
+ FileSystemManager::IsFile("test/buildtool/file_system/data/"));
+}
+
+TEST_CASE("IsExecutable", "[file_system]") {
+ CHECK(FileSystemManager::IsExecutable(
+ "test/buildtool/file_system/data/empty_executable"));
+ CHECK_FALSE(FileSystemManager::IsExecutable(
+ "test/buildtool/file_system/data/example_file"));
+ CHECK_FALSE(FileSystemManager::IsExecutable(
+ "test/buildtool/file_system/data/"));
+}
+
+TEST_CASE("Type", "[file_system]") {
+ auto const type_file = FileSystemManager::Type(
+ "test/buildtool/file_system/data/example_file");
+ REQUIRE(type_file);
+ CHECK(*type_file == ObjectType::File);
+
+ auto const type_exec = FileSystemManager::Type(
+ "test/buildtool/file_system/data/empty_executable");
+ REQUIRE(type_exec);
+ CHECK(*type_exec == ObjectType::Executable);
+
+ auto const type_dir =
+ FileSystemManager::Type("test/buildtool/file_system/data/");
+ REQUIRE(type_dir);
+ CHECK(*type_dir == ObjectType::Tree);
+}
+
+TEST_CASE("ChangeDirectory", "[file_system]") {
+ auto const starting_dir = FileSystemManager::GetCurrentDirectory();
+
+ auto const new_dir = GENERATE(as<std::filesystem::path>{},
+ "level0",
+ "level0/level1",
+ "a/b/c/d",
+ "./a/../e");
+
+ REQUIRE(FileSystemManager::CreateDirectory(new_dir));
+ {
+ auto anchor = FileSystemManager::ChangeDirectory(new_dir);
+ CHECK(std::filesystem::equivalent(
+ starting_dir / new_dir, FileSystemManager::GetCurrentDirectory()));
+ }
+ CHECK(starting_dir == FileSystemManager::GetCurrentDirectory());
+}
+
+TEST_CASE("ReadFile", "[file_system]") {
+ SECTION("Existing file") {
+ std::string const expected_content{"test\n"};
+ std::filesystem::path file{"./tmp-ReadFile/file"};
+
+ REQUIRE(FileSystemManager::CreateDirectory(file.parent_path()));
+ std::ofstream writer{file};
+ writer << expected_content;
+ writer.close();
+
+ auto const content = FileSystemManager::ReadFile(file);
+ CHECK(content.has_value());
+ CHECK(content == expected_content);
+ }
+ SECTION("Non-existing file") {
+ std::filesystem::path file{
+ "test/buildtool/file_system/data/this_file_does_not_exist"};
+ REQUIRE(not std::filesystem::exists(file));
+
+ auto const content = FileSystemManager::ReadFile(file);
+ CHECK_FALSE(content.has_value());
+ }
+}
+
+TEST_CASE_METHOD(CopyFileFixture, "CopyFile", "[file_system]") {
+ // Copy file was successful
+ CHECK(FileSystemManager::CopyFile(from_, to_));
+
+ // file exists
+ CHECK(std::filesystem::exists(to_));
+ CHECK(std::filesystem::is_regular_file(to_));
+
+ // Contents are equal
+ auto const content_from = FileSystemManager::ReadFile(from_);
+ CHECK(content_from.has_value());
+ auto const content_to = FileSystemManager::ReadFile(to_);
+ CHECK(content_to.has_value());
+ CHECK(content_from == content_to);
+}
+
+TEST_CASE_METHOD(CopyFileFixture, "CopyFileAs", "[file_system]") {
+ SECTION("as file") {
+ // Copy as file was successful
+ CHECK(FileSystemManager::CopyFileAs(from_, to_, ObjectType::File));
+
+ // file exists
+ CHECK(std::filesystem::exists(to_));
+ CHECK(std::filesystem::is_regular_file(to_));
+ CHECK(not FileSystemManager::IsExecutable(to_));
+
+ // Contents are equal
+ auto const content_from = FileSystemManager::ReadFile(from_);
+ CHECK(content_from.has_value());
+ auto const content_to = FileSystemManager::ReadFile(to_);
+ CHECK(content_to.has_value());
+ CHECK(content_from == content_to);
+
+ // permissions should be 0444 (not writable, but removable)
+ CHECK(not FileSystemManager::WriteFile("replacement content", to_));
+ }
+ SECTION("as executable") {
+ // Copy as file was successful
+ CHECK(
+ FileSystemManager::CopyFileAs(from_, to_, ObjectType::Executable));
+
+ // file exists
+ CHECK(std::filesystem::exists(to_));
+ CHECK(std::filesystem::is_regular_file(to_));
+ CHECK(FileSystemManager::IsExecutable(to_));
+
+ // Contents are equal
+ auto const content_from = FileSystemManager::ReadFile(from_);
+ CHECK(content_from.has_value());
+ auto const content_to = FileSystemManager::ReadFile(to_);
+ CHECK(content_to.has_value());
+ CHECK(content_from == content_to);
+
+ // permissions should be 0555 (not writable, but removable)
+ CHECK(not FileSystemManager::WriteFile("replacement content", to_));
+ }
+}
+
+TEST_CASE("RemoveFile", "[file_system]") {
+ SECTION("Existing file") {
+ std::filesystem::path from{
+ "test/buildtool/file_system/data/example_file"};
+
+ std::filesystem::path to{"./tmp-RemoveFile/copied_file"};
+ REQUIRE(FileSystemManager::CreateDirectory(to.parent_path()));
+
+ CHECK(FileSystemManager::CopyFile(from, to));
+
+ CHECK(std::filesystem::exists(to));
+
+ CHECK(FileSystemManager::RemoveFile(to));
+
+ CHECK(not std::filesystem::exists(to));
+ }
+ SECTION("Non-existing file") {
+ std::filesystem::path file{
+ "test/buildtool/file_system/data/"
+ "this_file_does_not_exist_neither"};
+ CHECK(not std::filesystem::exists(file));
+ CHECK(FileSystemManager::RemoveFile(file)); // nothing to delete
+ }
+ SECTION("Existing but not file") {
+ std::filesystem::path dir{"./tmp-RemoveFile/dir"};
+ CHECK(FileSystemManager::CreateDirectory(dir));
+ CHECK(not FileSystemManager::RemoveFile(dir));
+ CHECK(std::filesystem::exists(dir));
+ }
+}
+
+TEST_CASE_METHOD(WriteFileFixture, "WriteFile", "[file_system]") {
+ std::string const content{"This are the contents\nof the file.\n"};
+
+ CHECK(FileSystemManager::WriteFile(content, file_path_));
+ CHECK(std::filesystem::exists(file_path_));
+ CHECK(std::filesystem::is_directory(file_path_.parent_path()));
+ CHECK(std::filesystem::is_regular_file(file_path_));
+
+ auto const written_content = FileSystemManager::ReadFile(file_path_);
+ CHECK(written_content.has_value());
+ CHECK(written_content == content);
+}
+
+TEST_CASE_METHOD(WriteFileFixture, "WriteFileAs", "[file_system]") {
+ SECTION("as a file") {
+ std::string const content{"This are the contents\nof the file.\n"};
+
+ CHECK(FileSystemManager::WriteFileAs(
+ content, file_path_, ObjectType::File));
+ CHECK(std::filesystem::exists(file_path_));
+ CHECK(std::filesystem::is_directory(file_path_.parent_path()));
+ CHECK(std::filesystem::is_regular_file(file_path_));
+ CHECK(not FileSystemManager::IsExecutable(file_path_));
+
+ auto const written_content = FileSystemManager::ReadFile(file_path_);
+ CHECK(written_content.has_value());
+ CHECK(written_content == content);
+
+ // permissions should be 0444 (not writable, but removable)
+ CHECK(not FileSystemManager::WriteFile("replacement content",
+ file_path_));
+ }
+ SECTION("as an executable") {
+ std::string const content{"\n"};
+
+ CHECK(FileSystemManager::WriteFileAs(
+ content, file_path_, ObjectType::Executable));
+ CHECK(std::filesystem::exists(file_path_));
+ CHECK(std::filesystem::is_directory(file_path_.parent_path()));
+ CHECK(std::filesystem::is_regular_file(file_path_));
+ CHECK(FileSystemManager::IsExecutable(file_path_));
+
+ auto const written_content = FileSystemManager::ReadFile(file_path_);
+ CHECK(written_content.has_value());
+ CHECK(written_content == content);
+
+ // permissions should be 0555 (not writable, but removable)
+ CHECK(not FileSystemManager::WriteFile("replacement content",
+ file_path_));
+ }
+}
+
+TEST_CASE("FileSystemManager", "[file_system]") {
+ // test file and test file content with newline and null characters
+ std::filesystem::path test_file{"test/file"};
+ std::filesystem::path copy_file{"test/copy"};
+ std::string test_content;
+ test_content += "test1";
+ test_content += '\n';
+ test_content += '\0';
+ test_content += "test2";
+
+ CHECK(FileSystemManager::IsRelativePath(test_file));
+ CHECK(not FileSystemManager::IsAbsolutePath(test_file));
+
+ // create parent directory
+ REQUIRE(FileSystemManager::CreateDirectory(test_file.parent_path()));
+
+ // scope to test RAII "DirectoryAnchor" (should restore CWD on destruction)
+ {
+ // change directory and obtain DirectoryAnchor
+ auto anchor =
+ FileSystemManager::ChangeDirectory(test_file.parent_path());
+
+ // assemble file creation command (escape null character)
+ std::string create_file_cmd{};
+ create_file_cmd += "echo -n \"";
+ std::for_each(
+ test_content.begin(), test_content.end(), [&](auto const& c) {
+ if (c == '\0') {
+ create_file_cmd += std::string{"\\0"};
+ }
+ else {
+ create_file_cmd += c;
+ }
+ });
+ create_file_cmd += "\" > " + test_file.filename().string();
+
+ // run file creation command
+ std::system(create_file_cmd.c_str());
+
+ // check if file exists
+ REQUIRE(FileSystemManager::IsFile(test_file.filename()));
+ } // restore CWD to parent path
+
+ // check if file exists with full path
+ REQUIRE(FileSystemManager::IsFile(test_file));
+
+ // read file content and compare with input above
+ auto const file_content = FileSystemManager::ReadFile(test_file);
+ REQUIRE(file_content.has_value());
+ CHECK(file_content == test_content);
+
+ // copy file without 'overwrite'
+ CHECK(FileSystemManager::CopyFile(
+ test_file, copy_file, std::filesystem::copy_options::none));
+
+ // copy file with 'overwrite'
+ CHECK(FileSystemManager::CopyFile(copy_file, test_file));
+
+ // remove files and verify removal
+ CHECK(FileSystemManager::RemoveFile(test_file));
+ CHECK(not FileSystemManager::IsFile(test_file));
+ CHECK(FileSystemManager::RemoveFile(copy_file));
+ CHECK(not FileSystemManager::IsFile(copy_file));
+}
diff --git a/test/buildtool/file_system/git_tree.test.cpp b/test/buildtool/file_system/git_tree.test.cpp
new file mode 100644
index 00000000..caaba96a
--- /dev/null
+++ b/test/buildtool/file_system/git_tree.test.cpp
@@ -0,0 +1,527 @@
+#include <thread>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "src/buildtool/file_system/git_tree.hpp"
+#include "test/utils/container_matchers.hpp"
+
+namespace {
+
+auto const kBundlePath =
+ std::string{"test/buildtool/file_system/data/test_repo.bundle"};
+auto const kTreeId = std::string{"e51a219a27b672ccf17abec7d61eb4d6e0424140"};
+auto const kFooId = std::string{"19102815663d23f8b75a47e7a01965dcdc96468c"};
+auto const kBarId = std::string{"ba0e162e1c47469e3fe4b393a8bf8c569f302116"};
+auto const kFailId = std::string{"0123456789abcdef0123456789abcdef01234567"};
+
+[[nodiscard]] auto HexToRaw(std::string const& hex) -> std::string;
+[[nodiscard]] auto RawToHex(std::string const& raw) -> std::string {
+ return ToHexString(raw);
+}
+
+[[nodiscard]] auto GetTestDir() -> std::filesystem::path {
+ auto* tmp_dir = std::getenv("TEST_TMPDIR");
+ if (tmp_dir != nullptr) {
+ return tmp_dir;
+ }
+ return FileSystemManager::GetCurrentDirectory() /
+ "test/buildtool/file_system";
+}
+
+[[nodiscard]] auto CreateTestRepo(bool is_bare = false)
+ -> std::optional<std::filesystem::path> {
+ auto repo_path = GetTestDir() / "test_repo" /
+ std::filesystem::path{std::tmpnam(nullptr)}.filename();
+ auto cmd = fmt::format("git clone {}{} {}",
+ is_bare ? "--bare " : "",
+ kBundlePath,
+ repo_path.string());
+ if (std::system(cmd.c_str()) == 0) {
+ return repo_path;
+ }
+ return std::nullopt;
+}
+
+} // namespace
+
+TEST_CASE("Open Git CAS", "[git_cas]") {
+ SECTION("Bare repository") {
+ auto repo_path = CreateTestRepo(true);
+ REQUIRE(repo_path);
+ CHECK(GitCAS::Open(*repo_path));
+ }
+
+ SECTION("Non-bare repository") {
+ auto repo_path = CreateTestRepo(false);
+ REQUIRE(repo_path);
+ CHECK(GitCAS::Open(*repo_path));
+ }
+
+ SECTION("Non-existing repository") {
+ CHECK_FALSE(GitCAS::Open("does_not_exist"));
+ }
+}
+
+TEST_CASE("Read Git Objects", "[git_cas]") {
+ auto repo_path = CreateTestRepo(true);
+ REQUIRE(repo_path);
+ auto cas = GitCAS::Open(*repo_path);
+ REQUIRE(cas);
+
+ SECTION("valid ids") {
+ CHECK(cas->ReadObject(kFooId, /*is_hex_id=*/true));
+ CHECK(cas->ReadObject(HexToRaw(kFooId), /*is_hex_id=*/false));
+
+ CHECK(cas->ReadObject(kBarId, /*is_hex_id=*/true));
+ CHECK(cas->ReadObject(HexToRaw(kBarId), /*is_hex_id=*/false));
+
+ CHECK(cas->ReadObject(kTreeId, /*is_hex_id=*/true));
+ CHECK(cas->ReadObject(HexToRaw(kTreeId), /*is_hex_id=*/false));
+ }
+
+ SECTION("invalid ids") {
+ CHECK_FALSE(cas->ReadObject("", /*is_hex_id=*/true));
+ CHECK_FALSE(cas->ReadObject("", /*is_hex_id=*/false));
+
+ CHECK_FALSE(cas->ReadObject(kFailId, /*is_hex_id=*/true));
+ CHECK_FALSE(cas->ReadObject(HexToRaw(kFailId), /*is_hex_id=*/false));
+
+ CHECK_FALSE(cas->ReadObject(RawToHex("to_short"), /*is_hex_id=*/true));
+ CHECK_FALSE(cas->ReadObject("to_short", /*is_hex_id=*/false));
+
+ CHECK_FALSE(cas->ReadObject("invalid_chars", /*is_hex_id=*/true));
+ }
+}
+
+TEST_CASE("Read Git Headers", "[git_cas]") {
+ auto repo_path = CreateTestRepo(true);
+ REQUIRE(repo_path);
+ auto cas = GitCAS::Open(*repo_path);
+ REQUIRE(cas);
+
+ SECTION("valid ids") {
+ CHECK(cas->ReadHeader(kFooId, /*is_hex_id=*/true));
+ CHECK(cas->ReadHeader(HexToRaw(kFooId), /*is_hex_id=*/false));
+
+ CHECK(cas->ReadHeader(kBarId, /*is_hex_id=*/true));
+ CHECK(cas->ReadHeader(HexToRaw(kBarId), /*is_hex_id=*/false));
+
+ CHECK(cas->ReadHeader(kTreeId, /*is_hex_id=*/true));
+ CHECK(cas->ReadHeader(HexToRaw(kTreeId), /*is_hex_id=*/false));
+ }
+
+ SECTION("invalid ids") {
+ CHECK_FALSE(cas->ReadHeader("", /*is_hex_id=*/true));
+ CHECK_FALSE(cas->ReadHeader("", /*is_hex_id=*/false));
+
+ CHECK_FALSE(cas->ReadHeader(kFailId, /*is_hex_id=*/true));
+ CHECK_FALSE(cas->ReadHeader(HexToRaw(kFailId), /*is_hex_id=*/false));
+
+ CHECK_FALSE(cas->ReadHeader(RawToHex("to_short"), /*is_hex_id=*/true));
+ CHECK_FALSE(cas->ReadHeader("to_short", /*is_hex_id=*/false));
+
+ CHECK_FALSE(cas->ReadHeader("invalid_chars", /*is_hex_id=*/true));
+ }
+}
+
+TEST_CASE("Read Git Tree", "[git_tree]") {
+ SECTION("Bare repository") {
+ auto repo_path = CreateTestRepo(true);
+ REQUIRE(repo_path);
+ CHECK(GitTree::Read(*repo_path, kTreeId));
+ CHECK_FALSE(GitTree::Read(*repo_path, "wrong_tree_id"));
+ }
+
+ SECTION("Non-bare repository") {
+ auto repo_path = CreateTestRepo(false);
+ REQUIRE(repo_path);
+ CHECK(GitTree::Read(*repo_path, kTreeId));
+ CHECK_FALSE(GitTree::Read(*repo_path, "wrong_tree_id"));
+ }
+}
+
+TEST_CASE("Lookup entries by name", "[git_tree]") {
+ auto repo_path = CreateTestRepo(true);
+ REQUIRE(repo_path);
+ auto tree_root = GitTree::Read(*repo_path, kTreeId);
+ REQUIRE(tree_root);
+
+ auto entry_foo = tree_root->LookupEntryByName("foo");
+ REQUIRE(entry_foo);
+ CHECK(entry_foo->IsBlob());
+ CHECK(entry_foo->Type() == ObjectType::File);
+
+ auto blob_foo = entry_foo->Blob();
+ REQUIRE(blob_foo);
+ CHECK(*blob_foo == "foo");
+ CHECK(blob_foo->size() == 3);
+ CHECK(blob_foo->size() == *entry_foo->Size());
+
+ auto entry_bar = tree_root->LookupEntryByName("bar");
+ REQUIRE(entry_bar);
+ CHECK(entry_bar->IsBlob());
+ CHECK(entry_bar->Type() == ObjectType::Executable);
+
+ auto blob_bar = entry_bar->Blob();
+ REQUIRE(blob_bar);
+ CHECK(*blob_bar == "bar");
+ CHECK(blob_bar->size() == 3);
+ CHECK(blob_bar->size() == *entry_bar->Size());
+
+ auto entry_baz = tree_root->LookupEntryByName("baz");
+ REQUIRE(entry_baz);
+ CHECK(entry_baz->IsTree());
+ CHECK(entry_baz->Type() == ObjectType::Tree);
+
+ SECTION("Lookup missing entries") {
+ CHECK_FALSE(tree_root->LookupEntryByName("fool"));
+ CHECK_FALSE(tree_root->LookupEntryByName("barn"));
+ CHECK_FALSE(tree_root->LookupEntryByName("bazel"));
+ }
+
+ SECTION("Lookup entries in sub-tree") {
+ auto const& tree_baz = entry_baz->Tree();
+ REQUIRE(tree_baz);
+
+ auto entry_baz_foo = tree_baz->LookupEntryByName("foo");
+ REQUIRE(entry_baz_foo);
+ CHECK(entry_baz_foo->IsBlob());
+ CHECK(entry_baz_foo->Hash() == entry_foo->Hash());
+
+ auto entry_baz_bar = tree_baz->LookupEntryByName("bar");
+ REQUIRE(entry_baz_bar);
+ CHECK(entry_baz_bar->IsBlob());
+ CHECK(entry_baz_bar->Hash() == entry_bar->Hash());
+
+ auto entry_baz_baz = tree_baz->LookupEntryByName("baz");
+ REQUIRE(entry_baz_baz);
+ CHECK(entry_baz_baz->IsTree());
+
+ SECTION("Lookup missing entries") {
+ CHECK_FALSE(tree_baz->LookupEntryByName("fool"));
+ CHECK_FALSE(tree_baz->LookupEntryByName("barn"));
+ CHECK_FALSE(tree_baz->LookupEntryByName("bazel"));
+ }
+
+ SECTION("Lookup entries in sub-sub-tree") {
+ auto const& tree_baz_baz = entry_baz_baz->Tree();
+ REQUIRE(tree_baz_baz);
+
+ auto entry_baz_baz_foo = tree_baz_baz->LookupEntryByName("foo");
+ REQUIRE(entry_baz_baz_foo);
+ CHECK(entry_baz_baz_foo->IsBlob());
+ CHECK(entry_baz_baz_foo->Hash() == entry_foo->Hash());
+
+ auto entry_baz_baz_bar = tree_baz_baz->LookupEntryByName("bar");
+ REQUIRE(entry_baz_baz_bar);
+ CHECK(entry_baz_baz_bar->IsBlob());
+ CHECK(entry_baz_baz_bar->Hash() == entry_bar->Hash());
+
+ SECTION("Lookup missing entries") {
+ CHECK_FALSE(tree_baz_baz->LookupEntryByName("fool"));
+ CHECK_FALSE(tree_baz_baz->LookupEntryByName("barn"));
+ CHECK_FALSE(tree_baz_baz->LookupEntryByName("bazel"));
+ }
+ }
+ }
+}
+
+TEST_CASE("Lookup entries by path", "[git_tree]") {
+ auto repo_path = CreateTestRepo(true);
+ REQUIRE(repo_path);
+ auto tree_root = GitTree::Read(*repo_path, kTreeId);
+ REQUIRE(tree_root);
+
+ auto entry_foo = tree_root->LookupEntryByPath("foo");
+ REQUIRE(entry_foo);
+ CHECK(entry_foo->IsBlob());
+ CHECK(entry_foo->Type() == ObjectType::File);
+
+ auto blob_foo = entry_foo->Blob();
+ REQUIRE(blob_foo);
+ CHECK(*blob_foo == "foo");
+ CHECK(blob_foo->size() == 3);
+ CHECK(blob_foo->size() == *entry_foo->Size());
+
+ auto entry_bar = tree_root->LookupEntryByPath("bar");
+ REQUIRE(entry_bar);
+ CHECK(entry_bar->IsBlob());
+ CHECK(entry_bar->Type() == ObjectType::Executable);
+
+ auto blob_bar = entry_bar->Blob();
+ REQUIRE(blob_bar);
+ CHECK(*blob_bar == "bar");
+ CHECK(blob_bar->size() == 3);
+ CHECK(blob_bar->size() == *entry_bar->Size());
+
+ auto entry_baz = tree_root->LookupEntryByPath("baz");
+ REQUIRE(entry_baz);
+ CHECK(entry_baz->IsTree());
+ CHECK(entry_baz->Type() == ObjectType::Tree);
+
+ SECTION("Lookup missing entries") {
+ CHECK_FALSE(tree_root->LookupEntryByPath("fool"));
+ CHECK_FALSE(tree_root->LookupEntryByPath("barn"));
+ CHECK_FALSE(tree_root->LookupEntryByPath("bazel"));
+ }
+
+ SECTION("Lookup entries in sub-tree") {
+ auto entry_baz_foo = tree_root->LookupEntryByPath("baz/foo");
+ REQUIRE(entry_baz_foo);
+ CHECK(entry_baz_foo->IsBlob());
+ CHECK(entry_baz_foo->Hash() == entry_foo->Hash());
+
+ auto entry_baz_bar = tree_root->LookupEntryByPath("baz/bar");
+ REQUIRE(entry_baz_bar);
+ CHECK(entry_baz_bar->IsBlob());
+ CHECK(entry_baz_bar->Hash() == entry_bar->Hash());
+
+ auto entry_baz_baz = tree_root->LookupEntryByPath("baz/baz");
+ REQUIRE(entry_baz_baz);
+ CHECK(entry_baz_baz->IsTree());
+
+ SECTION("Lookup missing entries") {
+ CHECK_FALSE(tree_root->LookupEntryByPath("baz/fool"));
+ CHECK_FALSE(tree_root->LookupEntryByPath("baz/barn"));
+ CHECK_FALSE(tree_root->LookupEntryByPath("baz/bazel"));
+ }
+
+ SECTION("Lookup entries in sub-sub-tree") {
+ auto entry_baz_baz_foo =
+ tree_root->LookupEntryByPath("baz/baz/foo");
+ REQUIRE(entry_baz_baz_foo);
+ CHECK(entry_baz_baz_foo->IsBlob());
+ CHECK(entry_baz_baz_foo->Hash() == entry_foo->Hash());
+
+ auto entry_baz_baz_bar =
+ tree_root->LookupEntryByPath("baz/baz/bar");
+ REQUIRE(entry_baz_baz_bar);
+ CHECK(entry_baz_baz_bar->IsBlob());
+ CHECK(entry_baz_baz_bar->Hash() == entry_bar->Hash());
+
+ SECTION("Lookup missing entries") {
+ CHECK_FALSE(tree_root->LookupEntryByPath("baz/baz/fool"));
+ CHECK_FALSE(tree_root->LookupEntryByPath("baz/baz/barn"));
+ CHECK_FALSE(tree_root->LookupEntryByPath("baz/baz/bazel"));
+ }
+ }
+ }
+}
+
+TEST_CASE("Lookup entries by special names", "[git_tree]") {
+ auto repo_path = CreateTestRepo(true);
+ REQUIRE(repo_path);
+ auto tree_root = GitTree::Read(*repo_path, kTreeId);
+ REQUIRE(tree_root);
+
+ CHECK_FALSE(tree_root->LookupEntryByName(".")); // forbidden
+ CHECK_FALSE(tree_root->LookupEntryByName("..")); // forbidden
+ CHECK_FALSE(tree_root->LookupEntryByName("baz/")); // invalid name
+ CHECK_FALSE(tree_root->LookupEntryByName("baz/foo")); // invalid name
+}
+
+TEST_CASE("Lookup entries by special paths", "[git_tree]") {
+ auto repo_path = CreateTestRepo(true);
+ REQUIRE(repo_path);
+ auto tree_root = GitTree::Read(*repo_path, kTreeId);
+ REQUIRE(tree_root);
+
+ SECTION("valid paths") {
+ CHECK(tree_root->LookupEntryByPath("baz/"));
+ CHECK(tree_root->LookupEntryByPath("baz/foo"));
+ CHECK(tree_root->LookupEntryByPath("baz/../baz/"));
+ CHECK(tree_root->LookupEntryByPath("./baz/"));
+ CHECK(tree_root->LookupEntryByPath("./baz/foo"));
+ CHECK(tree_root->LookupEntryByPath("./baz/../foo"));
+ }
+
+ SECTION("invalid paths") {
+ CHECK_FALSE(tree_root->LookupEntryByPath(".")); // forbidden
+ CHECK_FALSE(tree_root->LookupEntryByPath("..")); // outside of tree
+ CHECK_FALSE(tree_root->LookupEntryByPath("/baz")); // outside of tree
+ CHECK_FALSE(tree_root->LookupEntryByPath("baz/..")); // == '.'
+ }
+}
+
+TEST_CASE("Iterate tree entries", "[git_tree]") {
+ auto repo_path = CreateTestRepo(true);
+ REQUIRE(repo_path);
+ auto tree_root = GitTree::Read(*repo_path, kTreeId);
+ REQUIRE(tree_root);
+
+ std::vector<std::string> names{};
+ for (auto const& [name, entry] : *tree_root) {
+ CHECK(entry);
+ names.emplace_back(name);
+ }
+ CHECK_THAT(names,
+ HasSameUniqueElementsAs<std::vector<std::string>>(
+ {"foo", "bar", "baz"}));
+}
+
+TEST_CASE("Thread-safety", "[git_tree]") {
+ constexpr auto kNumThreads = 100;
+
+ atomic<bool> starting_signal{false};
+ std::vector<std::thread> threads{};
+ threads.reserve(kNumThreads);
+
+ auto repo_path = CreateTestRepo(true);
+ REQUIRE(repo_path);
+
+ SECTION("Opening and reading from the same CAS") {
+ for (int id{}; id < kNumThreads; ++id) {
+ threads.emplace_back(
+ [&repo_path, &starting_signal](int tid) {
+ starting_signal.wait(false);
+
+ auto cas = GitCAS::Open(*repo_path);
+ REQUIRE(cas);
+
+ // every second thread reads bar instead of foo
+ auto id = tid % 2 == 0 ? kFooId : kBarId;
+ CHECK(cas->ReadObject(id, /*is_hex_id=*/true));
+
+ auto header = cas->ReadHeader(id, /*is_hex_id=*/true);
+ CHECK(header->first == 3);
+ CHECK(header->second == ObjectType::File);
+ },
+ id);
+ }
+
+ starting_signal = true;
+ starting_signal.notify_all();
+
+ // wait for threads to finish
+ for (auto& thread : threads) {
+ thread.join();
+ }
+ }
+
+ SECTION("Reading from different trees with same CAS") {
+ for (int id{}; id < kNumThreads; ++id) {
+ threads.emplace_back(
+ [&repo_path, &starting_signal](int tid) {
+ starting_signal.wait(false);
+
+ auto tree_root = GitTree::Read(*repo_path, kTreeId);
+ REQUIRE(tree_root);
+
+ auto entry_subdir = tree_root->LookupEntryByName("baz");
+ REQUIRE(entry_subdir);
+ REQUIRE(entry_subdir->IsTree());
+
+ // every second thread reads subdir instead of root
+ auto const& tree_read =
+ tid % 2 == 0 ? tree_root : entry_subdir->Tree();
+
+ auto entry_foo = tree_read->LookupEntryByName("foo");
+ auto entry_bar = tree_read->LookupEntryByName("bar");
+ REQUIRE(entry_foo);
+ REQUIRE(entry_bar);
+ CHECK(entry_foo->Blob() == "foo");
+ CHECK(entry_bar->Blob() == "bar");
+ },
+ id);
+ }
+
+ starting_signal = true;
+ starting_signal.notify_all();
+
+ // wait for threads to finish
+ for (auto& thread : threads) {
+ thread.join();
+ }
+ }
+
+ SECTION("Reading from the same tree") {
+ auto tree_root = GitTree::Read(*repo_path, kTreeId);
+ REQUIRE(tree_root);
+
+ for (int id{}; id < kNumThreads; ++id) {
+ threads.emplace_back(
+ [&tree_root, &starting_signal](int tid) {
+ // every second thread reads bar instead of foo
+ auto name =
+ tid % 2 == 0 ? std::string{"foo"} : std::string{"bar"};
+
+ starting_signal.wait(false);
+
+ auto entry = tree_root->LookupEntryByName(name);
+ REQUIRE(entry);
+ CHECK(entry->Blob() == name);
+ },
+ id);
+ }
+
+ starting_signal = true;
+ starting_signal.notify_all();
+
+ // wait for threads to finish
+ for (auto& thread : threads) {
+ thread.join();
+ }
+ }
+}
+
+namespace {
+
+auto HexToRaw(std::string const& hex) -> std::string {
+ if (hex.size() % 2 != 0) {
+ return {};
+ }
+ auto conv = [](char c) -> unsigned char {
+ switch (c) {
+ case '0':
+ return 0x0;
+ case '1':
+ return 0x1;
+ case '2':
+ return 0x2;
+ case '3':
+ return 0x3;
+ case '4':
+ return 0x4;
+ case '5':
+ return 0x5; // NOLINT
+ case '6':
+ return 0x6; // NOLINT
+ case '7':
+ return 0x7; // NOLINT
+ case '8':
+ return 0x8; // NOLINT
+ case '9':
+ return 0x9; // NOLINT
+ case 'a':
+ case 'A':
+ return 0xa; // NOLINT
+ case 'b':
+ case 'B':
+ return 0xb; // NOLINT
+ case 'c':
+ case 'C':
+ return 0xc; // NOLINT
+ case 'd':
+ case 'D':
+ return 0xd; // NOLINT
+ case 'e':
+ case 'E':
+ return 0xe; // NOLINT
+ case 'f':
+ case 'F':
+ return 0xf; // NOLINT
+ default:
+ return '\0';
+ }
+ };
+ std::string out{};
+ out.reserve(hex.size() / 2);
+ std::size_t i{};
+ while (i < hex.size()) {
+ auto val = static_cast<unsigned>(conv(hex[i++]) << 4U);
+ out += static_cast<char>(val | conv(hex[i++]));
+ }
+ return out;
+}
+
+} // namespace
diff --git a/test/buildtool/file_system/system_command.test.cpp b/test/buildtool/file_system/system_command.test.cpp
new file mode 100644
index 00000000..81b4e2b6
--- /dev/null
+++ b/test/buildtool/file_system/system_command.test.cpp
@@ -0,0 +1,115 @@
+#include <cstdlib>
+#include <iostream>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/file_system/system_command.hpp"
+
+namespace {
+[[nodiscard]] auto GetTestDir() -> std::filesystem::path {
+ auto* tmp_dir = std::getenv("TEST_TMPDIR");
+ if (tmp_dir != nullptr) {
+ return tmp_dir;
+ }
+ return FileSystemManager::GetCurrentDirectory() /
+ "test/buildtool/file_system";
+}
+} // namespace
+
+TEST_CASE("SystemCommand", "[filesystem]") {
+ using Catch::Matchers::Contains;
+ using Catch::Matchers::StartsWith;
+
+ std::string name{"ExecutorTest"};
+ SystemCommand system{name};
+
+ auto const testdir = GetTestDir();
+
+ SECTION("empty command") {
+ auto tmpdir = testdir / "empty";
+ REQUIRE(FileSystemManager::CreateDirectoryExclusive(tmpdir));
+ auto output = system.Execute(
+ {}, {}, FileSystemManager::GetCurrentDirectory(), tmpdir);
+ CHECK(not output.has_value());
+ }
+
+ SECTION("simple command, no arguments, no env variables") {
+ auto tmpdir = testdir / "simple_noargs";
+ REQUIRE(FileSystemManager::CreateDirectoryExclusive(tmpdir));
+ auto output = system.Execute(
+ {"echo"}, {}, FileSystemManager::GetCurrentDirectory(), tmpdir);
+ REQUIRE(output.has_value());
+ CHECK(output->return_value == 0);
+ CHECK(*FileSystemManager::ReadFile(output->stdout_file) == "\n");
+ CHECK(FileSystemManager::ReadFile(output->stderr_file)->empty());
+ }
+
+ SECTION(
+ "simple command, env variables are expanded only when wrapped with "
+ "/bin/sh") {
+ auto tmpdir = testdir / "simple_env0";
+ REQUIRE(FileSystemManager::CreateDirectoryExclusive(tmpdir));
+ auto output = system.Execute({"echo", "${MY_MESSAGE}"},
+ {{"MY_MESSAGE", "hello"}},
+ FileSystemManager::GetCurrentDirectory(),
+ tmpdir);
+ REQUIRE(output.has_value());
+ CHECK(output->return_value == 0);
+ CHECK(*FileSystemManager::ReadFile(output->stdout_file) ==
+ "${MY_MESSAGE}\n");
+ CHECK(FileSystemManager::ReadFile(output->stderr_file)->empty());
+
+ tmpdir = testdir / "simple_env1";
+ REQUIRE(FileSystemManager::CreateDirectoryExclusive(tmpdir));
+ auto output_wrapped =
+ system.Execute({"/bin/sh", "-c", "set -e\necho ${MY_MESSAGE}"},
+ {{"MY_MESSAGE", "hello"}},
+ FileSystemManager::GetCurrentDirectory(),
+ tmpdir);
+ REQUIRE(output_wrapped.has_value());
+ CHECK(output_wrapped->return_value == 0);
+ CHECK(*FileSystemManager::ReadFile(output_wrapped->stdout_file) ==
+ "hello\n");
+ CHECK(
+ FileSystemManager::ReadFile(output_wrapped->stderr_file)->empty());
+ }
+
+ SECTION("executable, producing std output, std error and return value") {
+ auto tmpdir = testdir / "exe_output";
+ REQUIRE(FileSystemManager::CreateDirectoryExclusive(tmpdir));
+ auto output = system.Execute(
+ {"/bin/sh",
+ "-c",
+ "set -e\necho this is stdout; echo this is stderr >&2; exit 5"},
+ {},
+ FileSystemManager::GetCurrentDirectory(),
+ tmpdir);
+ REQUIRE(output.has_value());
+ CHECK(output->return_value == 5);
+ CHECK(*FileSystemManager::ReadFile(output->stdout_file) ==
+ "this is stdout\n");
+ CHECK(*FileSystemManager::ReadFile(output->stderr_file) ==
+ "this is stderr\n");
+ }
+
+ SECTION(
+ "executable dependent on env, producing std output, std error and "
+ "return value") {
+ auto tmpdir = testdir / "exe_output_from_env";
+ REQUIRE(FileSystemManager::CreateDirectoryExclusive(tmpdir));
+ std::string const stdout = "this is stdout from env var";
+ std::string const stderr = "this is stderr from env var";
+ auto output = system.Execute(
+ {"/bin/sh",
+ "-c",
+ "set -e\necho ${MY_STDOUT}; echo ${MY_STDERR} >&2; exit 5"},
+ {{"MY_STDOUT", stdout}, {"MY_STDERR", stderr}},
+ FileSystemManager::GetCurrentDirectory(),
+ tmpdir);
+ REQUIRE(output.has_value());
+ CHECK(output->return_value == 5);
+ CHECK(*FileSystemManager::ReadFile(output->stdout_file) ==
+ stdout + '\n');
+ CHECK(*FileSystemManager::ReadFile(output->stderr_file) ==
+ stderr + '\n');
+ }
+}
diff --git a/test/buildtool/graph_traverser/TARGETS b/test/buildtool/graph_traverser/TARGETS
new file mode 100644
index 00000000..c31e569a
--- /dev/null
+++ b/test/buildtool/graph_traverser/TARGETS
@@ -0,0 +1,84 @@
+{ "graph_traverser_tests":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["graph_traverser_tests"]
+ , "hdrs": ["graph_traverser.test.hpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["src/buildtool/common", "common"]
+ , ["src/buildtool/file_system", "file_system_manager"]
+ , ["src/buildtool/file_system", "jsonfs"]
+ , ["src/buildtool/graph_traverser", "graph_traverser"]
+ , ["test/utils", "test_env"]
+ ]
+ , "stage": ["test", "buildtool", "graph_traverser"]
+ }
+, "graph_traverser_local":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["graph_traverser_local"]
+ , "srcs": ["graph_traverser_local.test.cpp"]
+ , "data": ["test_data"]
+ , "deps":
+ [ "graph_traverser_tests"
+ , ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["test/utils", "local_hermeticity"]
+ ]
+ , "stage": ["test", "buildtool", "graph_traverser"]
+ }
+, "graph_traverser_remote":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["graph_traverser_remote"]
+ , "srcs": ["graph_traverser_remote.test.cpp"]
+ , "data": ["test_data"]
+ , "deps":
+ [ "graph_traverser_tests"
+ , ["@", "catch2", "", "catch2"]
+ , ["test/utils", "catch-main-remote-execution"]
+ ]
+ , "stage": ["test", "buildtool", "graph_traverser"]
+ }
+, "test_data":
+ { "type": ["@", "rules", "data", "staged"]
+ , "srcs":
+ [ "data/copy_local_file/_entry_points"
+ , "data/copy_local_file/copy_me.hpp"
+ , "data/copy_local_file/graph_description"
+ , "data/flaky_hello_world/_entry_points"
+ , "data/flaky_hello_world/_entry_points_ctimes"
+ , "data/flaky_hello_world/_entry_points_stripped"
+ , "data/flaky_hello_world/graph_description"
+ , "data/flaky_hello_world/hello_world.cpp"
+ , "data/hello_world_copy_message/_entry_points"
+ , "data/hello_world_copy_message/_entry_points_get_executable"
+ , "data/hello_world_copy_message/_entry_points_upload_source"
+ , "data/hello_world_copy_message/graph_description"
+ , "data/hello_world_copy_message/hello_world.cpp"
+ , "data/hello_world_known_source/_entry_points"
+ , "data/hello_world_known_source/graph_description"
+ , "data/sequence_printer_build_library_only/_entry_points"
+ , "data/sequence_printer_build_library_only/_entry_points_full_build"
+ , "data/sequence_printer_build_library_only/graph_description"
+ , "data/sequence_printer_build_library_only/main.cpp"
+ , "data/sequence_printer_build_library_only/printer/printer.hpp"
+ , "data/sequence_printer_build_library_only/sequences/fibonacci.cpp"
+ , "data/sequence_printer_build_library_only/sequences/fibonacci.hpp"
+ , "data/sequence_printer_build_library_only/sequences/random_dna_sequence.cpp"
+ , "data/sequence_printer_build_library_only/sequences/random_dna_sequence.hpp"
+ , "data/sequence_printer_build_library_only/sequences/sequence.hpp"
+ , "data/use_env_variables/_entry_points"
+ , "data/use_env_variables/graph_description"
+ , "data/use_nested_trees/_entry_points"
+ , "data/use_nested_trees/graph_description"
+ , "data/use_trees/_entry_points"
+ , "data/use_trees/graph_description"
+ , "data/use_uploaded_blobs/_entry_points"
+ , "data/use_uploaded_blobs/graph_description"
+ ]
+ , "stage": ["test", "buildtool", "graph_traverser"]
+ }
+, "TESTS":
+ { "type": "install"
+ , "tainted": ["test"]
+ , "deps": ["graph_traverser_local", "graph_traverser_remote"]
+ }
+} \ No newline at end of file
diff --git a/test/buildtool/graph_traverser/data/copy_local_file/_entry_points b/test/buildtool/graph_traverser/data/copy_local_file/_entry_points
new file mode 100644
index 00000000..9e5c35c0
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/copy_local_file/_entry_points
@@ -0,0 +1,9 @@
+{
+ "copied.hpp": {
+ "type": "LOCAL",
+ "data": {
+ "path": "copy_me.hpp",
+ "repository": ""
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/data/copy_local_file/copy_me.hpp b/test/buildtool/graph_traverser/data/copy_local_file/copy_me.hpp
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/copy_local_file/copy_me.hpp
diff --git a/test/buildtool/graph_traverser/data/copy_local_file/graph_description b/test/buildtool/graph_traverser/data/copy_local_file/graph_description
new file mode 100644
index 00000000..692211f1
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/copy_local_file/graph_description
@@ -0,0 +1,5 @@
+{
+ "blobs": [],
+ "trees": {},
+ "actions": {}
+}
diff --git a/test/buildtool/graph_traverser/data/flaky_hello_world/_entry_points b/test/buildtool/graph_traverser/data/flaky_hello_world/_entry_points
new file mode 100644
index 00000000..eb31cca8
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/flaky_hello_world/_entry_points
@@ -0,0 +1,9 @@
+{
+ "greeting_output": {
+ "type": "ACTION",
+ "data": {
+ "id": "make_output",
+ "path": "greeting_output"
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/data/flaky_hello_world/_entry_points_ctimes b/test/buildtool/graph_traverser/data/flaky_hello_world/_entry_points_ctimes
new file mode 100644
index 00000000..b13e8e03
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/flaky_hello_world/_entry_points_ctimes
@@ -0,0 +1,9 @@
+{
+ "ctimes": {
+ "type": "ACTION",
+ "data": {
+ "id": "list_ctimes",
+ "path": "ctimes"
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/data/flaky_hello_world/_entry_points_stripped b/test/buildtool/graph_traverser/data/flaky_hello_world/_entry_points_stripped
new file mode 100644
index 00000000..ebd4b856
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/flaky_hello_world/_entry_points_stripped
@@ -0,0 +1,9 @@
+{
+ "stripped_greeting_output": {
+ "type": "ACTION",
+ "data": {
+ "id": "strip_time",
+ "path": "stripped_greeting_output"
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/data/flaky_hello_world/graph_description b/test/buildtool/graph_traverser/data/flaky_hello_world/graph_description
new file mode 100644
index 00000000..8e5ee00e
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/flaky_hello_world/graph_description
@@ -0,0 +1,83 @@
+{
+ "actions": {
+ "make_exe": {
+ "output": [
+ "hello_world"
+ ],
+ "command": [
+ "g++", "hello_world.cpp", "-o", "hello_world"
+ ],
+ "input": {
+ "hello_world.cpp": {
+ "type": "LOCAL",
+ "data": {
+ "path": "hello_world.cpp",
+ "repository": ""
+ }
+ }
+ },
+ "env": {
+ "PATH": "/bin:/usr/bin"
+ }
+ },
+ "make_output": {
+ "output": [
+ "greeting_output"
+ ],
+ "command": [
+ "/bin/sh",
+ "-c",
+ "set -e\n./hello_world > greeting_output"
+ ],
+ "input": {
+ "hello_world": {
+ "type": "ACTION",
+ "data": {
+ "id": "make_exe",
+ "path": "hello_world"
+ }
+ }
+ }
+ },
+ "strip_time": {
+ "output": [
+ "stripped_greeting_output"
+ ],
+ "command": [
+ "/bin/sh",
+ "-c",
+ "set -e\nhead -n1 greeting_output > stripped_greeting_output"
+ ],
+ "input": {
+ "greeting_output": {
+ "type": "ACTION",
+ "data": {
+ "id": "make_output",
+ "path": "greeting_output"
+ }
+ }
+ }
+ },
+ "list_ctimes": {
+ "output": [
+ "ctimes"
+ ],
+ "command": [
+ "/bin/sh",
+ "-c",
+ "set -e\nls --full-time --time ctime > ctimes"
+ ],
+ "input": {
+ "stripped_greeting_output": {
+ "type": "ACTION",
+ "data": {
+ "id": "strip_time",
+ "path": "stripped_greeting_output"
+ }
+ }
+ }
+ }
+ },
+ "blobs": [],
+ "trees": {}
+}
diff --git a/test/buildtool/graph_traverser/data/flaky_hello_world/hello_world.cpp b/test/buildtool/graph_traverser/data/flaky_hello_world/hello_world.cpp
new file mode 100644
index 00000000..38bc23ac
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/flaky_hello_world/hello_world.cpp
@@ -0,0 +1,6 @@
+#include <iostream>
+
+int main() {
+ std::cout << "Hello, World!" << std::endl
+ << "It's now " << __TIME__ << std::endl;
+}
diff --git a/test/buildtool/graph_traverser/data/hello_world_copy_message/_entry_points b/test/buildtool/graph_traverser/data/hello_world_copy_message/_entry_points
new file mode 100644
index 00000000..eb31cca8
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/hello_world_copy_message/_entry_points
@@ -0,0 +1,9 @@
+{
+ "greeting_output": {
+ "type": "ACTION",
+ "data": {
+ "id": "make_output",
+ "path": "greeting_output"
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/data/hello_world_copy_message/_entry_points_get_executable b/test/buildtool/graph_traverser/data/hello_world_copy_message/_entry_points_get_executable
new file mode 100755
index 00000000..7de0b8b7
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/hello_world_copy_message/_entry_points_get_executable
@@ -0,0 +1,9 @@
+{
+ "executable": {
+ "type": "ACTION",
+ "data": {
+ "id": "make_exe",
+ "path": "hello_world"
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/data/hello_world_copy_message/_entry_points_upload_source b/test/buildtool/graph_traverser/data/hello_world_copy_message/_entry_points_upload_source
new file mode 100644
index 00000000..046414c8
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/hello_world_copy_message/_entry_points_upload_source
@@ -0,0 +1,9 @@
+{
+ "local_src_file.cpp": {
+ "type": "LOCAL",
+ "data": {
+ "path": "hello_world.cpp",
+ "repository": ""
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/data/hello_world_copy_message/graph_description b/test/buildtool/graph_traverser/data/hello_world_copy_message/graph_description
new file mode 100644
index 00000000..829a3249
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/hello_world_copy_message/graph_description
@@ -0,0 +1,45 @@
+{
+ "actions": {
+ "make_exe": {
+ "output": [
+ "hello_world"
+ ],
+ "command": [
+ "g++", "hello_world.cpp", "-o", "hello_world"
+ ],
+ "input": {
+ "hello_world.cpp": {
+ "type": "LOCAL",
+ "data": {
+ "path": "hello_world.cpp",
+ "repository": ""
+ }
+ }
+ },
+ "env": {
+ "PATH": "/bin:/usr/bin"
+ }
+ },
+ "make_output": {
+ "output": [
+ "greeting_output"
+ ],
+ "command": [
+ "/bin/sh",
+ "-c",
+ "set -e\n./hello_world > greeting_output\nls -alR\n"
+ ],
+ "input": {
+ "hello_world": {
+ "type": "ACTION",
+ "data": {
+ "id": "make_exe",
+ "path": "hello_world"
+ }
+ }
+ }
+ }
+ },
+ "blobs": [],
+ "trees": {}
+}
diff --git a/test/buildtool/graph_traverser/data/hello_world_copy_message/hello_world.cpp b/test/buildtool/graph_traverser/data/hello_world_copy_message/hello_world.cpp
new file mode 100644
index 00000000..980270b9
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/hello_world_copy_message/hello_world.cpp
@@ -0,0 +1,5 @@
+#include <iostream>
+
+int main() {
+ std::cout << "Hello, World!" << std::endl;
+}
diff --git a/test/buildtool/graph_traverser/data/hello_world_known_source/_entry_points b/test/buildtool/graph_traverser/data/hello_world_known_source/_entry_points
new file mode 100644
index 00000000..eb31cca8
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/hello_world_known_source/_entry_points
@@ -0,0 +1,9 @@
+{
+ "greeting_output": {
+ "type": "ACTION",
+ "data": {
+ "id": "make_output",
+ "path": "greeting_output"
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/data/hello_world_known_source/graph_description b/test/buildtool/graph_traverser/data/hello_world_known_source/graph_description
new file mode 100644
index 00000000..ae21c957
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/hello_world_known_source/graph_description
@@ -0,0 +1,46 @@
+{
+ "actions": {
+ "make_exe": {
+ "output": [
+ "hello_world"
+ ],
+ "command": [
+ "g++", "hello_world.cpp", "-o", "hello_world"
+ ],
+ "input": {
+ "hello_world.cpp": {
+ "type": "KNOWN",
+ "data": {
+ "id": "980270b9f3ec439239ae7633fe5811bc57a6ec3f",
+ "size": 83,
+ "file_type": "f"
+ }
+ }
+ },
+ "env": {
+ "PATH": "/bin:/usr/bin"
+ }
+ },
+ "make_output": {
+ "output": [
+ "greeting_output"
+ ],
+ "command": [
+ "/bin/sh",
+ "-c",
+ "set -e\n./hello_world > greeting_output\nls -alR\n"
+ ],
+ "input": {
+ "hello_world": {
+ "type": "ACTION",
+ "data": {
+ "id": "make_exe",
+ "path": "hello_world"
+ }
+ }
+ }
+ }
+ },
+ "blobs": [],
+ "trees": {}
+}
diff --git a/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/_entry_points b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/_entry_points
new file mode 100644
index 00000000..6cd4463a
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/_entry_points
@@ -0,0 +1,9 @@
+{
+ "sequences.a": {
+ "type": "ACTION",
+ "data": {
+ "id": "sequences/link",
+ "path": "lib.a"
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/_entry_points_full_build b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/_entry_points_full_build
new file mode 100644
index 00000000..a4d9713d
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/_entry_points_full_build
@@ -0,0 +1,9 @@
+{
+ "sequence_printer.out": {
+ "type": "ACTION",
+ "data": {
+ "id": "main/link",
+ "path": "a.out"
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/graph_description b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/graph_description
new file mode 100644
index 00000000..3540147c
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/graph_description
@@ -0,0 +1,173 @@
+{
+ "blobs": [],
+ "trees": {},
+ "actions": {
+ "sequences/random_dna_sequence/compile": {
+ "input": {
+ "sequence.hpp": {
+ "type": "LOCAL",
+ "data": {
+ "path": "sequences/sequence.hpp",
+ "repository": ""
+ }
+ },
+ "random_dna_sequence.hpp": {
+ "type": "LOCAL",
+ "data": {
+ "path": "sequences/random_dna_sequence.hpp",
+ "repository": ""
+ }
+ },
+ "random_dna_sequence.cpp": {
+ "type": "LOCAL",
+ "data": {
+ "path": "sequences/random_dna_sequence.cpp",
+ "repository": ""
+ }
+ }
+ },
+ "output": [
+ "obj.o"
+ ],
+ "command": [
+ "c++", "-o", "obj.o", "-c", "random_dna_sequence.cpp"
+ ],
+ "env": {
+ "PATH": "/bin:/usr/bin"
+ }
+ },
+ "sequences/fibonacci/compile": {
+ "input": {
+ "sequence.hpp": {
+ "type": "LOCAL",
+ "data": {
+ "path": "sequences/sequence.hpp",
+ "repository": ""
+ }
+ },
+ "fibonacci.hpp": {
+ "type": "LOCAL",
+ "data": {
+ "path": "sequences/fibonacci.hpp",
+ "repository": ""
+ }
+ },
+ "fibonacci.cpp": {
+ "type": "LOCAL",
+ "data": {
+ "path": "sequences/fibonacci.cpp",
+ "repository": ""
+ }
+ }
+ },
+ "output": [
+ "obj.o"
+ ],
+ "command": [
+ "c++", "-o", "obj.o", "-c", "fibonacci.cpp"
+ ],
+ "env": {
+ "PATH": "/bin:/usr/bin"
+ }
+ },
+ "sequences/link": {
+ "input": {
+ "obj1.o": {
+ "type": "ACTION",
+ "data": {
+ "id": "sequences/random_dna_sequence/compile",
+ "path": "obj.o"
+ }
+ },
+ "obj2.o": {
+ "type": "ACTION",
+ "data": {
+ "id": "sequences/fibonacci/compile",
+ "path": "obj.o"
+ }
+ }
+ },
+ "output": [
+ "lib.a"
+ ],
+ "command": [
+ "ar", "cqs", "lib.a", "obj1.o", "obj2.o"
+ ]
+ },
+ "main/compile": {
+ "input": {
+ "fibonacci.hpp": {
+ "type": "LOCAL",
+ "data": {
+ "path": "sequences/fibonacci.hpp",
+ "repository": ""
+ }
+ },
+ "random_dna_sequence.hpp": {
+ "type": "LOCAL",
+ "data": {
+ "path": "sequences/random_dna_sequence.hpp",
+ "repository": ""
+ }
+ },
+ "sequence.hpp": {
+ "type": "LOCAL",
+ "data": {
+ "path": "sequences/sequence.hpp",
+ "repository": ""
+ }
+ },
+ "printer.hpp": {
+ "type": "LOCAL",
+ "data": {
+ "path": "printer/printer.hpp",
+ "repository": ""
+ }
+ },
+ "main.cpp": {
+ "type": "LOCAL",
+ "data": {
+ "path": "main.cpp",
+ "repository": ""
+ }
+ }
+ },
+ "output": [
+ "obj.o"
+ ],
+ "command": [
+ "c++", "-o", "obj.o", "-c", "main.cpp"
+ ],
+ "env": {
+ "PATH": "/bin:/usr/bin"
+ }
+ },
+ "main/link": {
+ "input": {
+ "lib1.a": {
+ "type": "ACTION",
+ "data": {
+ "id": "sequences/link",
+ "path": "lib.a"
+ }
+ },
+ "obj1.o": {
+ "type": "ACTION",
+ "data": {
+ "id": "main/compile",
+ "path": "obj.o"
+ }
+ }
+ },
+ "output": [
+ "a.out"
+ ],
+ "command": [
+ "c++", "-o", "a.out", "obj1.o", "lib1.a"
+ ],
+ "env": {
+ "PATH": "/bin:/usr/bin"
+ }
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/main.cpp b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/main.cpp
new file mode 100644
index 00000000..4bee9b2c
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/main.cpp
@@ -0,0 +1,27 @@
+#include <iostream>
+
+#include "fibonacci.hpp"
+#include "printer.hpp"
+#include "random_dna_sequence.hpp"
+
+int main() {
+ Printer printer;
+
+ Fibonacci fib;
+ std::cout
+ << "PRINT 10 following terms of Fibonacci sequence starting with 0 1"
+ << std::endl;
+ printer.print(fib, 10U);
+ std::cout << std::endl;
+
+ Fibonacci fib2_5{2, 5};
+ std::cout
+ << "PRINT 8 following terms of Fibonacci sequence starting with 2 5"
+ << std::endl;
+ printer.print(fib2_5, 8U);
+ std::cout << std::endl;
+
+ RandomDNASequence piece_of_something;
+ std::cout << "PRINT a random dna sequence of length 3" << std::endl;
+ printer.print(piece_of_something, 30U);
+}
diff --git a/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/printer/printer.hpp b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/printer/printer.hpp
new file mode 100644
index 00000000..e48272af
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/printer/printer.hpp
@@ -0,0 +1,18 @@
+#pragma once
+
+#include <iostream>
+
+class Printer {
+ public:
+ template <class SequenceT>
+ void print(SequenceT& seq, unsigned int number_of_terms) {
+ if (number_of_terms == 0) {
+ std::cout << std::endl;
+ return;
+ }
+ for (unsigned int i = 0; i < number_of_terms - 1; ++i) {
+ std::cout << seq.next() << seq.separator();
+ }
+ std::cout << seq.next() << std::endl;
+ }
+};
diff --git a/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/fibonacci.cpp b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/fibonacci.cpp
new file mode 100644
index 00000000..412bd2a0
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/fibonacci.cpp
@@ -0,0 +1,17 @@
+#include "fibonacci.hpp"
+
+Fibonacci::Fibonacci() : second_prev_{0}, prev_{1} {}
+
+Fibonacci::Fibonacci(int zero_th, int first)
+ : second_prev_{zero_th}, prev_{first} {}
+
+int Fibonacci::next() {
+ int next = second_prev_ + prev_;
+ second_prev_ = prev_;
+ prev_ = next;
+ return next;
+}
+
+std::string Fibonacci::separator() {
+ return ", ";
+}
diff --git a/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/fibonacci.hpp b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/fibonacci.hpp
new file mode 100644
index 00000000..ae3a5c24
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/fibonacci.hpp
@@ -0,0 +1,17 @@
+#pragma once
+
+#include <string>
+#include "sequence.hpp"
+
+class Fibonacci : public Sequence<int> {
+ public:
+ Fibonacci();
+ Fibonacci(int zeroth, int first);
+ ~Fibonacci() override = default;
+ int next() override;
+ std::string separator() override;
+
+ private:
+ int second_prev_;
+ int prev_;
+};
diff --git a/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/random_dna_sequence.cpp b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/random_dna_sequence.cpp
new file mode 100644
index 00000000..7f6e5dea
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/random_dna_sequence.cpp
@@ -0,0 +1,25 @@
+#include "random_dna_sequence.hpp"
+
+RandomDNASequence::RandomDNASequence()
+ : eng_(static_cast<unsigned int>(
+ std::chrono::system_clock::now().time_since_epoch().count())),
+ dist_(0, 3) {}
+
+RandomDNASequence::RandomDNASequence(unsigned int seed)
+ : eng_(seed), dist_(0, 3) {}
+
+char RandomDNASequence::next() {
+ int option = dist_(eng_);
+ if (option == 0)
+ return 'A';
+ if (option == 1)
+ return 'C';
+ if (option == 2)
+ return 'G';
+
+ return 'T';
+}
+
+std::string RandomDNASequence::separator() {
+ return "";
+}
diff --git a/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/random_dna_sequence.hpp b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/random_dna_sequence.hpp
new file mode 100644
index 00000000..aad9da68
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/random_dna_sequence.hpp
@@ -0,0 +1,19 @@
+#pragma once
+
+#include <chrono>
+#include <random>
+#include <string>
+#include "sequence.hpp"
+
+class RandomDNASequence : public Sequence<char> {
+ public:
+ RandomDNASequence();
+ explicit RandomDNASequence(unsigned int seed);
+ ~RandomDNASequence() override = default;
+ char next() override;
+ std::string separator() override;
+
+ private:
+ std::default_random_engine eng_;
+ std::uniform_int_distribution<> dist_;
+};
diff --git a/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/sequence.hpp b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/sequence.hpp
new file mode 100644
index 00000000..d5d80af0
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/sequence_printer_build_library_only/sequences/sequence.hpp
@@ -0,0 +1,15 @@
+#pragma once
+
+#include <string>
+
+template <typename T>
+class Sequence {
+ public:
+ typedef T value_type;
+
+ virtual T next() = 0;
+
+ virtual std::string separator() = 0;
+
+ virtual ~Sequence() = default;
+};
diff --git a/test/buildtool/graph_traverser/data/use_env_variables/_entry_points b/test/buildtool/graph_traverser/data/use_env_variables/_entry_points
new file mode 100644
index 00000000..c454bd16
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/use_env_variables/_entry_points
@@ -0,0 +1,9 @@
+{
+ "out": {
+ "type": "ACTION",
+ "data": {
+ "id": "write_to_file",
+ "path": "out"
+ }
+ }
+} \ No newline at end of file
diff --git a/test/buildtool/graph_traverser/data/use_env_variables/graph_description b/test/buildtool/graph_traverser/data/use_env_variables/graph_description
new file mode 100644
index 00000000..728ecb16
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/use_env_variables/graph_description
@@ -0,0 +1,18 @@
+{
+ "blobs": [],
+ "trees": {},
+ "actions": {
+ "write_to_file": {
+ "output": ["out"],
+ "command": [
+ "/bin/sh",
+ "-c",
+ "set -e\necho -n ${MYCONTENT} > out"
+ ],
+ "env": {
+ "MYCONTENT": "content from environment variable",
+ "UNUSED_VAR": "nothing important"
+ }
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/data/use_nested_trees/_entry_points b/test/buildtool/graph_traverser/data/use_nested_trees/_entry_points
new file mode 100644
index 00000000..9ee98967
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/use_nested_trees/_entry_points
@@ -0,0 +1,9 @@
+{
+ "statement": {
+ "type": "ACTION",
+ "data": {
+ "id": "write_test",
+ "path": "statement"
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/data/use_nested_trees/graph_description b/test/buildtool/graph_traverser/data/use_nested_trees/graph_description
new file mode 100644
index 00000000..201dd5b0
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/use_nested_trees/graph_description
@@ -0,0 +1,51 @@
+{
+ "blobs": ["test to check if blobs are uploaded", "this"],
+ "trees": {
+ "4d8756372185a2f0c7a1c63ae4aec5080d5abb0e": {
+ "subject": {
+ "type": "KNOWN",
+ "data": {
+ "id": "a2a3f4f1e30c488bfbd52aabfbcfcc1f5822158d",
+ "size": 4,
+ "file_type": "f"
+ }
+ }
+ },
+ "a00e8ce5eec43c5d77b54d0df6016df943ed05da": {
+ "nested": {
+ "type": "TREE",
+ "data": {
+ "id": "4d8756372185a2f0c7a1c63ae4aec5080d5abb0e"
+ }
+ }
+ }
+ },
+ "actions": {
+ "write_test": {
+ "input": {
+ "main": {
+ "type": "TREE",
+ "data": {
+ "id": "a00e8ce5eec43c5d77b54d0df6016df943ed05da"
+ }
+ },
+ "thing": {
+ "type": "KNOWN",
+ "data": {
+ "id": "d4d7eecc25bcbd902b0b97a97e8e2e478c97454b",
+ "size": 35,
+ "file_type": "f"
+ }
+ }
+ },
+ "output": [
+ "statement"
+ ],
+ "command": [
+ "/bin/sh",
+ "-c",
+ "set -e\necho -n \"$(cat main/nested/subject) is a $(cat thing)\" > statement"
+ ]
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/data/use_trees/_entry_points b/test/buildtool/graph_traverser/data/use_trees/_entry_points
new file mode 100644
index 00000000..79b23c86
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/use_trees/_entry_points
@@ -0,0 +1,9 @@
+{
+ "statement": {
+ "type": "ACTION",
+ "data": {
+ "id": "read_test",
+ "path": "statement"
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/data/use_trees/graph_description b/test/buildtool/graph_traverser/data/use_trees/graph_description
new file mode 100644
index 00000000..26e43ade
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/use_trees/graph_description
@@ -0,0 +1,70 @@
+{
+ "blobs": ["test to check if blobs are uploaded", "this"],
+ "trees": {
+ "a6a2c1940ec94d9b93b86cf2c73d434402eb36ae": {
+ "subject": {
+ "type": "KNOWN",
+ "data": {
+ "id": "a2a3f4f1e30c488bfbd52aabfbcfcc1f5822158d",
+ "size": 4,
+ "file_type": "f"
+ }
+ },
+ "thing": {
+ "type": "KNOWN",
+ "data": {
+ "id": "d4d7eecc25bcbd902b0b97a97e8e2e478c97454b",
+ "size": 35,
+ "file_type": "f"
+ }
+ }
+ },
+ "4a331831df3699e3e37ea31ac15c3b9cc318e06d": {
+ "test_data": {
+ "type": "ACTION",
+ "data": {
+ "id": "write_test",
+ "path": "statement"
+ }
+ }
+ }
+ },
+ "actions": {
+ "write_test": {
+ "input": {
+ ".": {
+ "type": "TREE",
+ "data": {
+ "id": "a6a2c1940ec94d9b93b86cf2c73d434402eb36ae"
+ }
+ }
+ },
+ "output": [
+ "statement"
+ ],
+ "command": [
+ "/bin/sh",
+ "-c",
+ "set -e\necho -n \"$(cat subject) is a $(cat thing)\" > statement"
+ ]
+ },
+ "read_test": {
+ "input": {
+ "data": {
+ "type": "TREE",
+ "data": {
+ "id": "4a331831df3699e3e37ea31ac15c3b9cc318e06d"
+ }
+ }
+ },
+ "output": [
+ "statement"
+ ],
+ "command": [
+ "/bin/sh",
+ "-c",
+ "cat data/test_data > statement"
+ ]
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/data/use_uploaded_blobs/_entry_points b/test/buildtool/graph_traverser/data/use_uploaded_blobs/_entry_points
new file mode 100644
index 00000000..9ee98967
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/use_uploaded_blobs/_entry_points
@@ -0,0 +1,9 @@
+{
+ "statement": {
+ "type": "ACTION",
+ "data": {
+ "id": "write_test",
+ "path": "statement"
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/data/use_uploaded_blobs/graph_description b/test/buildtool/graph_traverser/data/use_uploaded_blobs/graph_description
new file mode 100644
index 00000000..dd7c87bb
--- /dev/null
+++ b/test/buildtool/graph_traverser/data/use_uploaded_blobs/graph_description
@@ -0,0 +1,34 @@
+{
+ "blobs": ["test to check if blobs are uploaded", "this"],
+ "trees": {},
+ "actions": {
+ "write_test": {
+ "input": {
+ "subject": {
+ "type": "KNOWN",
+ "data": {
+ "id": "a2a3f4f1e30c488bfbd52aabfbcfcc1f5822158d",
+ "size": 4,
+ "file_type": "f"
+ }
+ },
+ "thing": {
+ "type": "KNOWN",
+ "data": {
+ "id": "d4d7eecc25bcbd902b0b97a97e8e2e478c97454b",
+ "size": 35,
+ "file_type": "f"
+ }
+ }
+ },
+ "output": [
+ "statement"
+ ],
+ "command": [
+ "/bin/sh",
+ "-c",
+ "set -e\necho -n \"$(cat subject) is a $(cat thing)\" > statement"
+ ]
+ }
+ }
+}
diff --git a/test/buildtool/graph_traverser/graph_traverser.test.hpp b/test/buildtool/graph_traverser/graph_traverser.test.hpp
new file mode 100644
index 00000000..d9aea69d
--- /dev/null
+++ b/test/buildtool/graph_traverser/graph_traverser.test.hpp
@@ -0,0 +1,412 @@
+#ifndef INCLUDED_SRC_TEST_BUILDTOOL_GRAPH_GRAVERSER_GRAPH_TRAVERSER_TEST_HPP
+#define INCLUDED_SRC_TEST_BUILDTOOL_GRAPH_GRAVERSER_GRAPH_TRAVERSER_TEST_HPP
+
+#include <filesystem>
+#include <sstream>
+#include <utility>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/common/statistics.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "src/buildtool/file_system/jsonfs.hpp"
+#include "src/buildtool/graph_traverser/graph_traverser.hpp"
+#include "src/buildtool/logging/logger.hpp"
+#include "test/utils/test_env.hpp"
+#include "src/utils/cpp/json.hpp"
+
+// NOLINTNEXTLINE(google-build-namespaces)
+namespace {
+
+class TestProject {
+ public:
+ struct CommandLineArguments {
+ GraphTraverser::CommandLineArguments gtargs;
+ nlohmann::json artifacts{};
+ std::filesystem::path graph_description{};
+
+ explicit CommandLineArguments(
+ GraphTraverser::CommandLineArguments gtargs)
+ : gtargs{std::move(std::move(gtargs))} {}
+ };
+
+ // NOLINTNEXTLINE(modernize-pass-by-value)
+ explicit TestProject(std::string const& example_name,
+ bool run_local = false)
+ : example_name_{example_name},
+ root_dir_{kWorkspacePrefix / example_name_},
+ run_local_{run_local} {
+ SetupConfig();
+ }
+
+ explicit TestProject(std::string&& example_name, bool run_local = false)
+ : example_name_{std::move(example_name)},
+ root_dir_{kWorkspacePrefix / example_name_},
+ run_local_{run_local} {
+ SetupConfig();
+ }
+
+ /// \brief Get command line arguments parsing entry points file in
+ /// data/<example-name>/<entry_points_filename>, where
+ /// <entry_points_filename> takes "_entry_points" as default value
+ auto CmdLineArgs(std::string const& entry_points_filename =
+ kDefaultEntryPointsFileName) -> CommandLineArguments {
+ auto const entry_points_file = root_dir_ / entry_points_filename;
+ if (not FileSystemManager::IsFile(entry_points_file)) {
+ Logger::Log(
+ LogLevel::Error,
+ "file with entry points for graph_traverser tests can not be "
+ "found in path {}",
+ entry_points_file.string());
+ std::exit(EXIT_FAILURE);
+ }
+ auto const entry_points_json = Json::ReadFile(entry_points_file);
+ if (not entry_points_json.has_value()) {
+ Logger::Log(LogLevel::Error,
+ "can not read {} for graph_traverser tests",
+ entry_points_file.string());
+ std::exit(EXIT_FAILURE);
+ }
+ return GenerateFromEntryPoints(*entry_points_json);
+ }
+
+ private:
+ static inline std::filesystem::path const kOutputDirPrefix =
+ FileSystemManager::GetCurrentDirectory() / "./tmp-";
+ static inline std::filesystem::path const kWorkspacePrefix =
+ FileSystemManager::GetCurrentDirectory() /
+ "test/buildtool/graph_traverser/data/";
+ static inline std::string const kDefaultEntryPointsFileName =
+ "_entry_points";
+ std::string example_name_{};
+ std::filesystem::path root_dir_{};
+ static inline int id_{};
+ bool run_local_{};
+
+ void SetupConfig() {
+ auto info = RepositoryConfig::RepositoryInfo{FileRoot{root_dir_}};
+ RepositoryConfig::Instance().Reset();
+ RepositoryConfig::Instance().SetInfo("", std::move(info));
+ }
+
+ auto GenerateFromEntryPoints(nlohmann::json const& entry_points)
+ -> CommandLineArguments {
+
+ GraphTraverser::CommandLineArguments gtargs{0, {}, {}, {}, {}};
+
+ CommandLineArguments clargs{gtargs};
+ clargs.artifacts = entry_points;
+ clargs.graph_description = root_dir_ / "graph_description";
+ clargs.gtargs.jobs = std::max(1U, std::thread::hardware_concurrency());
+ clargs.gtargs.stage = StageArguments{
+ kOutputDirPrefix / (example_name_ + std::to_string(id_++))};
+ if (not run_local_) {
+ clargs.gtargs.endpoint.remote_execution_address =
+ ReadRemoteAddressFromEnv();
+ clargs.gtargs.build.platform_properties =
+ ReadPlatformPropertiesFromEnv();
+ if (not clargs.gtargs.endpoint.remote_execution_address) {
+ Logger::Log(LogLevel::Error,
+ "Missing env var 'REMOTE_EXECUTION_ADDRESS' for "
+ "non-local graph_traverser tests.");
+ std::exit(EXIT_FAILURE);
+ }
+ }
+ return clargs;
+ }
+};
+
+} // namespace
+
+[[maybe_unused]] static void TestHelloWorldCopyMessage(
+ bool run_local,
+ bool is_hermetic = true) {
+ TestProject p("hello_world_copy_message", run_local);
+
+ auto const clargs = p.CmdLineArgs();
+ GraphTraverser const gt{clargs.gtargs};
+ auto const output_paths =
+ gt.BuildAndStage(clargs.graph_description, clargs.artifacts);
+
+ REQUIRE(output_paths);
+ REQUIRE(output_paths->first.size() == 1);
+ CHECK(FileSystemManager::IsFile(output_paths->first.at(0)));
+ auto const contents =
+ FileSystemManager::ReadFile(output_paths->first.at(0));
+ CHECK(contents.has_value());
+ CHECK(contents == "Hello, World!\n");
+
+ if (is_hermetic) {
+ CHECK(Statistics::Instance().ActionsQueuedCounter() == 2);
+ CHECK(Statistics::Instance().ActionsCachedCounter() == 0);
+ }
+
+ SECTION("Executable is retrieved as executable") {
+ auto const clargs_exec = p.CmdLineArgs("_entry_points_get_executable");
+ GraphTraverser const gt_get_exec{clargs_exec.gtargs};
+ auto const exec_output_paths = gt_get_exec.BuildAndStage(
+ clargs_exec.graph_description, clargs_exec.artifacts);
+
+ REQUIRE(exec_output_paths);
+ REQUIRE(exec_output_paths->first.size() == 1);
+ auto const exec_path = exec_output_paths->first.at(0);
+ CHECK(FileSystemManager::IsFile(exec_path));
+ CHECK(FileSystemManager::IsExecutable(exec_path));
+ CHECK(FileSystemManager::Type(exec_path) == ObjectType::Executable);
+
+ if (is_hermetic) {
+ CHECK(Statistics::Instance().ActionsQueuedCounter() ==
+ 3); // One more action queued
+ CHECK(Statistics::Instance().ActionsCachedCounter() ==
+ 1); // But that action was cached
+ }
+ }
+}
+
+[[maybe_unused]] static void TestCopyLocalFile(bool run_local,
+ bool is_hermetic = true) {
+ TestProject p("copy_local_file", run_local);
+
+ auto const clargs = p.CmdLineArgs();
+ GraphTraverser const gt{clargs.gtargs};
+ auto const output_paths =
+ gt.BuildAndStage(clargs.graph_description, clargs.artifacts);
+
+ REQUIRE(output_paths);
+ REQUIRE(output_paths->first.size() == 1);
+ CHECK(FileSystemManager::IsFile(output_paths->first.at(0)));
+
+ if (is_hermetic) {
+ CHECK(Statistics::Instance().ActionsQueuedCounter() == 0);
+ CHECK(Statistics::Instance().ActionsCachedCounter() == 0);
+ }
+}
+
+[[maybe_unused]] static void TestSequencePrinterBuildLibraryOnly(
+ bool run_local,
+ bool is_hermetic = true) {
+ TestProject p("sequence_printer_build_library_only", run_local);
+
+ auto const clargs = p.CmdLineArgs();
+ GraphTraverser const gt{clargs.gtargs};
+ auto const output_paths =
+ gt.BuildAndStage(clargs.graph_description, clargs.artifacts);
+
+ REQUIRE(output_paths);
+ REQUIRE(output_paths->first.size() == 1);
+ CHECK(FileSystemManager::IsFile(output_paths->first.at(0)));
+
+ auto const clargs_full_build = p.CmdLineArgs("_entry_points_full_build");
+ GraphTraverser const gt_full_build{clargs_full_build.gtargs};
+ auto const full_build_output_paths = gt_full_build.BuildAndStage(
+ clargs_full_build.graph_description, clargs_full_build.artifacts);
+
+ REQUIRE(full_build_output_paths);
+ REQUIRE(full_build_output_paths->first.size() == 1);
+ CHECK(FileSystemManager::IsFile(full_build_output_paths->first.at(0)));
+
+ if (is_hermetic) {
+ CHECK(Statistics::Instance().ActionsQueuedCounter() == 8);
+ CHECK(Statistics::Instance().ActionsCachedCounter() == 3);
+ }
+ else {
+ CHECK(Statistics::Instance().ActionsCachedCounter() > 0);
+ }
+}
+
+[[maybe_unused]] static void TestHelloWorldWithKnownSource(
+ bool run_local,
+ bool is_hermetic = true) {
+ TestProject full_hello_world("hello_world_copy_message", run_local);
+
+ auto const clargs_update_cpp =
+ full_hello_world.CmdLineArgs("_entry_points_upload_source");
+ GraphTraverser const gt_upload{clargs_update_cpp.gtargs};
+ auto const cpp_output_path = gt_upload.BuildAndStage(
+ clargs_update_cpp.graph_description, clargs_update_cpp.artifacts);
+
+ REQUIRE(cpp_output_path);
+ REQUIRE(cpp_output_path->first.size() == 1);
+
+ CHECK(FileSystemManager::IsFile(cpp_output_path->first.at(0)));
+
+ if (is_hermetic) {
+ CHECK(Statistics::Instance().ActionsQueuedCounter() == 0);
+ CHECK(Statistics::Instance().ActionsCachedCounter() == 0);
+ }
+
+ TestProject hello_world_known_cpp("hello_world_known_source", run_local);
+
+ auto const clargs = hello_world_known_cpp.CmdLineArgs();
+ GraphTraverser const gt{clargs.gtargs};
+ auto const output_paths =
+ gt.BuildAndStage(clargs.graph_description, clargs.artifacts);
+
+ REQUIRE(output_paths);
+ REQUIRE(output_paths->first.size() == 1);
+ CHECK(FileSystemManager::IsFile(output_paths->first.at(0)));
+
+ if (is_hermetic) {
+ CHECK(Statistics::Instance().ActionsQueuedCounter() == 2);
+ CHECK(Statistics::Instance().ActionsCachedCounter() == 0);
+ }
+ else {
+ CHECK(Statistics::Instance().ActionsQueuedCounter() >= 2);
+ }
+}
+
+static void TestBlobsUploadedAndUsed(bool run_local, bool is_hermetic = true) {
+ TestProject p("use_uploaded_blobs", run_local);
+ auto const clargs = p.CmdLineArgs();
+
+ GraphTraverser gt{clargs.gtargs};
+ auto const output_paths =
+ gt.BuildAndStage(clargs.graph_description, clargs.artifacts);
+
+ REQUIRE(output_paths);
+ REQUIRE(output_paths->first.size() == 1);
+ CHECK(FileSystemManager::IsFile(output_paths->first.at(0)));
+
+ auto const contents =
+ FileSystemManager::ReadFile(output_paths->first.at(0));
+ CHECK(contents.has_value());
+ CHECK(contents == "this is a test to check if blobs are uploaded");
+
+ if (is_hermetic) {
+ CHECK(Statistics::Instance().ActionsQueuedCounter() == 1);
+ CHECK(Statistics::Instance().ActionsCachedCounter() == 0);
+ }
+ else {
+ CHECK(Statistics::Instance().ActionsQueuedCounter() >= 1);
+ }
+}
+
+static void TestEnvironmentVariablesSetAndUsed(bool run_local,
+ bool is_hermetic = true) {
+ TestProject p("use_env_variables", run_local);
+ auto const clargs = p.CmdLineArgs();
+
+ GraphTraverser gt{clargs.gtargs};
+ auto const output_paths =
+ gt.BuildAndStage(clargs.graph_description, clargs.artifacts);
+
+ REQUIRE(output_paths);
+ REQUIRE(output_paths->first.size() == 1);
+ CHECK(FileSystemManager::IsFile(output_paths->first.at(0)));
+
+ auto const contents =
+ FileSystemManager::ReadFile(output_paths->first.at(0));
+ CHECK(contents.has_value());
+ CHECK(contents == "content from environment variable");
+
+ if (is_hermetic) {
+ CHECK(Statistics::Instance().ActionsQueuedCounter() == 1);
+ CHECK(Statistics::Instance().ActionsCachedCounter() == 0);
+ }
+ else {
+ CHECK(Statistics::Instance().ActionsQueuedCounter() >= 1);
+ }
+}
+
+static void TestTreesUsed(bool run_local, bool is_hermetic = true) {
+ TestProject p("use_trees", run_local);
+ auto const clargs = p.CmdLineArgs();
+
+ GraphTraverser gt{clargs.gtargs};
+ auto const output_paths =
+ gt.BuildAndStage(clargs.graph_description, clargs.artifacts);
+
+ REQUIRE(output_paths);
+ REQUIRE(output_paths->first.size() == 1);
+ CHECK(FileSystemManager::IsFile(output_paths->first.at(0)));
+
+ auto const contents =
+ FileSystemManager::ReadFile(output_paths->first.at(0));
+ CHECK(contents.has_value());
+ CHECK(contents == "this is a test to check if blobs are uploaded");
+
+ if (is_hermetic) {
+ CHECK(Statistics::Instance().ActionsQueuedCounter() == 2);
+ CHECK(Statistics::Instance().ActionsCachedCounter() == 0);
+ }
+ else {
+ CHECK(Statistics::Instance().ActionsQueuedCounter() >= 2);
+ }
+}
+
+static void TestNestedTreesUsed(bool run_local, bool is_hermetic = true) {
+ TestProject p("use_nested_trees", run_local);
+ auto const clargs = p.CmdLineArgs();
+
+ GraphTraverser gt{clargs.gtargs};
+ auto const output_paths =
+ gt.BuildAndStage(clargs.graph_description, clargs.artifacts);
+
+ REQUIRE(output_paths);
+ REQUIRE(output_paths->first.size() == 1);
+ CHECK(FileSystemManager::IsFile(output_paths->first.at(0)));
+
+ auto const contents =
+ FileSystemManager::ReadFile(output_paths->first.at(0));
+ CHECK(contents.has_value());
+ CHECK(contents == "this is a test to check if blobs are uploaded");
+
+ if (is_hermetic) {
+ CHECK(Statistics::Instance().ActionsQueuedCounter() == 1);
+ CHECK(Statistics::Instance().ActionsCachedCounter() == 0);
+ }
+ else {
+ CHECK(Statistics::Instance().ActionsQueuedCounter() >= 1);
+ }
+}
+
+static void TestFlakyHelloWorldDetected(bool run_local,
+ bool /*is_hermetic*/ = true) {
+ TestProject p("flaky_hello_world", run_local);
+
+ {
+ auto clargs = p.CmdLineArgs("_entry_points_ctimes");
+ GraphTraverser const gt{clargs.gtargs};
+ auto const output_paths =
+ gt.BuildAndStage(clargs.graph_description, clargs.artifacts);
+
+ REQUIRE(output_paths);
+ REQUIRE(output_paths->first.size() == 1);
+ }
+
+ using namespace std::chrono_literals;
+ std::this_thread::sleep_for(1s);
+
+ // make_exe[flaky]->make_output[miss]
+ auto clargs_output = p.CmdLineArgs();
+ clargs_output.gtargs.rebuild = RebuildArguments{};
+ GraphTraverser const gt_output{clargs_output.gtargs};
+ REQUIRE(gt_output.BuildAndStage(clargs_output.graph_description,
+ clargs_output.artifacts));
+ CHECK(Statistics::Instance().ActionsFlakyCounter() == 1);
+ CHECK(Statistics::Instance().RebuiltActionComparedCounter() == 1);
+ CHECK(Statistics::Instance().RebuiltActionMissingCounter() == 1);
+ Statistics::Instance().Reset();
+
+ // make_exe[flaky]->make_output[miss]->strip_time [miss]
+ auto clargs_stripped = p.CmdLineArgs("_entry_points_stripped");
+ clargs_stripped.gtargs.rebuild = RebuildArguments{};
+ GraphTraverser const gt_stripped{clargs_stripped.gtargs};
+ REQUIRE(gt_stripped.BuildAndStage(clargs_stripped.graph_description,
+ clargs_stripped.artifacts));
+ CHECK(Statistics::Instance().ActionsFlakyCounter() == 1);
+ CHECK(Statistics::Instance().RebuiltActionComparedCounter() == 1);
+ CHECK(Statistics::Instance().RebuiltActionMissingCounter() == 2);
+ Statistics::Instance().Reset();
+
+ // make_exe[flaky]->make_output[miss]->strip_time[miss]->list_ctimes [flaky]
+ auto clargs_ctimes = p.CmdLineArgs("_entry_points_ctimes");
+ clargs_ctimes.gtargs.rebuild = RebuildArguments{};
+ GraphTraverser const gt_ctimes{clargs_ctimes.gtargs};
+ REQUIRE(gt_ctimes.BuildAndStage(clargs_ctimes.graph_description,
+ clargs_ctimes.artifacts));
+ CHECK(Statistics::Instance().ActionsFlakyCounter() == 2);
+ CHECK(Statistics::Instance().RebuiltActionComparedCounter() == 2);
+ CHECK(Statistics::Instance().RebuiltActionMissingCounter() == 2);
+}
+
+#endif // INCLUDED_SRC_TEST_BUILDTOOL_GRAPH_GRAVERSER_GRAPH_TRAVERSER_TEST_HPP
diff --git a/test/buildtool/graph_traverser/graph_traverser_local.test.cpp b/test/buildtool/graph_traverser/graph_traverser_local.test.cpp
new file mode 100644
index 00000000..86317625
--- /dev/null
+++ b/test/buildtool/graph_traverser/graph_traverser_local.test.cpp
@@ -0,0 +1,57 @@
+#include "catch2/catch.hpp"
+#include "test/buildtool/graph_traverser/graph_traverser.test.hpp"
+#include "test/utils/hermeticity/local.hpp"
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "Local: Output created when entry point is local artifact",
+ "[graph_traverser]") {
+ TestCopyLocalFile(true);
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "Local: Output created and contents are correct",
+ "[graph_traverser]") {
+ TestHelloWorldCopyMessage(true);
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "Local: Actions are not re-run",
+ "[graph_traverser]") {
+ TestSequencePrinterBuildLibraryOnly(true);
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "Local: KNOWN artifact",
+ "[graph_traverser]") {
+ TestHelloWorldWithKnownSource(true);
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "Local: Blobs uploaded and correctly used",
+ "[graph_traverser]") {
+ TestBlobsUploadedAndUsed(true);
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "Local: Environment variables are set and used",
+ "[graph_traverser]") {
+ TestEnvironmentVariablesSetAndUsed(true);
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "Local: Trees correctly used",
+ "[graph_traverser]") {
+ TestTreesUsed(true);
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "Local: Nested trees correctly used",
+ "[graph_traverser]") {
+ TestNestedTreesUsed(true);
+}
+
+TEST_CASE_METHOD(HermeticLocalTestFixture,
+ "Local: Detect flaky actions",
+ "[graph_traverser]") {
+ TestFlakyHelloWorldDetected(true);
+}
diff --git a/test/buildtool/graph_traverser/graph_traverser_remote.test.cpp b/test/buildtool/graph_traverser/graph_traverser_remote.test.cpp
new file mode 100644
index 00000000..8aad88ad
--- /dev/null
+++ b/test/buildtool/graph_traverser/graph_traverser_remote.test.cpp
@@ -0,0 +1,41 @@
+#include "catch2/catch.hpp"
+#include "test/buildtool/graph_traverser/graph_traverser.test.hpp"
+
+TEST_CASE("Remote: Output created and contents are correct",
+ "[graph_traverser]") {
+ TestHelloWorldCopyMessage(false, false /* not hermetic */);
+}
+
+TEST_CASE("Remote: Output created when entry point is local artifact",
+ "[graph_traverser]") {
+ TestCopyLocalFile(false, false /* not hermetic */);
+}
+
+TEST_CASE("Remote: Actions are not re-run", "[graph_traverser]") {
+ TestSequencePrinterBuildLibraryOnly(false, false /* not hermetic */);
+}
+
+TEST_CASE("Remote: KNOWN artifact", "[graph_traverser]") {
+ TestHelloWorldWithKnownSource(false, false /* not hermetic */);
+}
+
+TEST_CASE("Remote: Blobs uploaded and correctly used", "[graph_traverser]") {
+ TestBlobsUploadedAndUsed(false, false /* not hermetic */);
+}
+
+TEST_CASE("Remote: Environment variables are set and used",
+ "[graph_traverser]") {
+ TestEnvironmentVariablesSetAndUsed(false, false /* not hermetic */);
+}
+
+TEST_CASE("Remote: Trees correctly used", "[graph_traverser]") {
+ TestTreesUsed(false, false /* not hermetic */);
+}
+
+TEST_CASE("Remote: Nested trees correctly used", "[graph_traverser]") {
+ TestNestedTreesUsed(false, false /* not hermetic */);
+}
+
+TEST_CASE("Remote: Detect flaky actions", "[graph_traverser]") {
+ TestFlakyHelloWorldDetected(false, false /* not hermetic */);
+}
diff --git a/test/buildtool/logging/TARGETS b/test/buildtool/logging/TARGETS
new file mode 100644
index 00000000..3eec70cc
--- /dev/null
+++ b/test/buildtool/logging/TARGETS
@@ -0,0 +1,26 @@
+{ "logger":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["logger"]
+ , "srcs": ["logger.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/logging", "logging"]
+ ]
+ , "stage": ["test", "buildtool", "logging"]
+ }
+, "log_sink_file":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["log_sink_file"]
+ , "srcs": ["log_sink_file.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/logging", "logging"]
+ , ["src/buildtool/file_system", "file_system_manager"]
+ ]
+ , "stage": ["test", "buildtool", "logging"]
+ }
+, "TESTS":
+ {"type": "install", "tainted": ["test"], "deps": ["log_sink_file", "logger"]}
+} \ No newline at end of file
diff --git a/test/buildtool/logging/log_sink_file.test.cpp b/test/buildtool/logging/log_sink_file.test.cpp
new file mode 100644
index 00000000..19c0bd7b
--- /dev/null
+++ b/test/buildtool/logging/log_sink_file.test.cpp
@@ -0,0 +1,99 @@
+#include <fstream>
+#include <thread>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "src/buildtool/logging/log_config.hpp"
+#include "src/buildtool/logging/log_sink_cmdline.hpp"
+#include "src/buildtool/logging/log_sink_file.hpp"
+
+[[nodiscard]] static auto NumberOfLines(std::filesystem::path const& file_path)
+ -> int {
+ std::ifstream file(file_path);
+ std::string line{};
+ int number_of_lines{};
+ while (std::getline(file, line)) {
+ ++number_of_lines;
+ }
+ return number_of_lines;
+}
+
+[[nodiscard]] static auto GetLines(std::filesystem::path const& file_path)
+ -> std::vector<std::string> {
+ std::ifstream file(file_path);
+ std::string line{};
+ std::vector<std::string> lines{};
+ while (std::getline(file, line)) {
+ lines.push_back(line);
+ }
+ return lines;
+}
+
+TEST_CASE("LogSinkFile", "[logging]") {
+ LogConfig::SetSinks({LogSinkCmdLine::CreateFactory(false /*no color*/)});
+
+ // cleanup
+ std::string filename{"test/test.log"};
+ CHECK(FileSystemManager::RemoveFile(filename));
+ REQUIRE(not FileSystemManager::IsFile(filename));
+
+ // create test log file
+ REQUIRE(FileSystemManager::WriteFile("somecontent\n", filename));
+ REQUIRE(FileSystemManager::IsFile(filename));
+ CHECK(NumberOfLines(filename) == 1);
+
+ SECTION("Overwrite mode") {
+ LogSinkFile sink{filename, LogSinkFile::Mode::Overwrite};
+
+ sink.Emit(nullptr, LogLevel::Info, "first");
+ sink.Emit(nullptr, LogLevel::Info, "second");
+ sink.Emit(nullptr, LogLevel::Info, "third");
+
+ // read file and check line numbers
+ CHECK(NumberOfLines(filename) == 3);
+ }
+
+ SECTION("Append mode") {
+ LogSinkFile sink{filename, LogSinkFile::Mode::Append};
+
+ sink.Emit(nullptr, LogLevel::Info, "first");
+ sink.Emit(nullptr, LogLevel::Info, "second");
+ sink.Emit(nullptr, LogLevel::Info, "third");
+
+ // read file and check line numbers
+ CHECK(NumberOfLines(filename) == 4);
+ }
+
+ SECTION("Thread-safety") {
+ int const num_threads = 20;
+ LogSinkFile sink{filename, LogSinkFile::Mode::Append};
+
+ // start threads, each emitting a log message
+ std::vector<std::thread> threads{};
+ for (int id{}; id < num_threads; ++id) {
+ threads.emplace_back(
+ [&](int tid) {
+ sink.Emit(nullptr,
+ LogLevel::Info,
+ "this is thread " + std::to_string(tid));
+ },
+ id);
+ }
+
+ // wait for threads to finish
+ for (auto& thread : threads) {
+ thread.join();
+ }
+
+ // read file and check line numbers
+ auto lines = GetLines(filename);
+ CHECK(lines.size() == num_threads + 1);
+
+ // check for corrupted content
+ using Catch::Matchers::Contains;
+ for (auto const& line : lines) {
+ CHECK_THAT(line,
+ Contains("somecontent") or Contains("this is thread "));
+ }
+ }
+}
diff --git a/test/buildtool/logging/logger.test.cpp b/test/buildtool/logging/logger.test.cpp
new file mode 100644
index 00000000..329b69b0
--- /dev/null
+++ b/test/buildtool/logging/logger.test.cpp
@@ -0,0 +1,322 @@
+#include <atomic>
+#include <string>
+#include <unordered_map>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/logging/log_config.hpp"
+#include "src/buildtool/logging/log_sink.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+// Stores prints from test sink instances
+class TestPrints {
+ public:
+ static void Print(int sink_id, std::string const& print) noexcept {
+ prints_[sink_id].push_back(print);
+ }
+ [[nodiscard]] static auto Read(int sink_id) noexcept
+ -> std::vector<std::string> {
+ return prints_[sink_id];
+ }
+
+ static void Clear() noexcept {
+ prints_.clear();
+ counter_ = 0;
+ }
+
+ static auto GetId() noexcept -> int { return counter_++; }
+
+ private:
+ static inline std::atomic<int> counter_{};
+ static inline std::unordered_map<int, std::vector<std::string>> prints_{};
+};
+
+// Test sink, prints to TestPrints depending on its own instance id.
+class LogSinkTest : public ILogSink {
+ public:
+ static auto CreateFactory() -> LogSinkFactory {
+ return [] { return std::make_shared<LogSinkTest>(); };
+ }
+
+ LogSinkTest() noexcept { id_ = TestPrints::GetId(); }
+
+ void Emit(Logger const* logger,
+ LogLevel level,
+ std::string const& msg) const noexcept final {
+ auto prefix = LogLevelToString(level);
+
+ if (logger != nullptr) {
+ prefix += " (" + logger->Name() + ")";
+ }
+
+ TestPrints::Print(id_, prefix + ": " + msg);
+ }
+
+ private:
+ int id_{};
+};
+
+class OneGlobalSinkFixture {
+ public:
+ OneGlobalSinkFixture() {
+ TestPrints::Clear();
+ LogConfig::SetLogLimit(LogLevel::Info);
+ LogConfig::SetSinks({LogSinkTest::CreateFactory()});
+ }
+};
+
+class TwoGlobalSinksFixture : public OneGlobalSinkFixture {
+ public:
+ TwoGlobalSinksFixture() {
+ LogConfig::AddSink(LogSinkTest::CreateFactory());
+ }
+};
+
+TEST_CASE_METHOD(OneGlobalSinkFixture,
+ "Global static logger with one sink",
+ "[logger]") {
+ // logs should be forwarded to sink instance: 0
+ int instance = 0;
+
+ // create log outside of log limit
+ Logger::Log(LogLevel::Trace, "first");
+ CHECK(TestPrints::Read(instance).empty());
+
+ SECTION("create log within log limit") {
+ Logger::Log(LogLevel::Info, "second");
+ auto prints = TestPrints::Read(instance);
+ REQUIRE(prints.size() == 1);
+ CHECK(prints[0] == "INFO: second");
+
+ SECTION("increase log limit create log within log limit") {
+ LogConfig::SetLogLimit(LogLevel::Trace);
+ Logger::Log(LogLevel::Trace, "third");
+ auto prints = TestPrints::Read(instance);
+ REQUIRE(prints.size() == 2);
+ CHECK(prints[1] == "TRACE: third");
+
+ SECTION("log via lambda function") {
+ Logger::Log(LogLevel::Trace,
+ [] { return std::string{"forth"}; });
+ auto prints = TestPrints::Read(instance);
+ REQUIRE(prints.size() == 3);
+ CHECK(prints[2] == "TRACE: forth");
+ }
+ }
+ }
+}
+
+TEST_CASE_METHOD(OneGlobalSinkFixture,
+ "Local named logger using one global sink",
+ "[logger]") {
+ // create logger with sink instances from global LogConfig
+ Logger logger("TestLogger");
+
+ // logs should be forwarded to same sink instance as before: 0
+ int instance = 0;
+
+ // create log outside of log limit
+ logger.Emit(LogLevel::Trace, "first");
+ CHECK(TestPrints::Read(instance).empty());
+
+ SECTION("create log within log limit") {
+ logger.Emit(LogLevel::Info, "second");
+ auto prints = TestPrints::Read(instance);
+ REQUIRE(prints.size() == 1);
+ CHECK(prints[0] == "INFO (TestLogger): second");
+
+ SECTION("increase log limit create log within log limit") {
+ logger.SetLogLimit(LogLevel::Trace);
+ logger.Emit(LogLevel::Trace, "third");
+ auto prints = TestPrints::Read(instance);
+ REQUIRE(prints.size() == 2);
+ CHECK(prints[1] == "TRACE (TestLogger): third");
+
+ SECTION("log via lambda function") {
+ logger.Emit(LogLevel::Trace,
+ [] { return std::string{"forth"}; });
+ auto prints = TestPrints::Read(instance);
+ REQUIRE(prints.size() == 3);
+ CHECK(prints[2] == "TRACE (TestLogger): forth");
+ }
+ }
+ }
+}
+
+TEST_CASE_METHOD(OneGlobalSinkFixture,
+ "Local named logger with its own sink instance"
+ "[logger]") {
+ // create logger with separate sink instance
+ Logger logger("OwnSinkLogger", {LogSinkTest::CreateFactory()});
+
+ // logs should be forwarded to new sink instance: 1
+ int instance = 1;
+
+ // create log outside of log limit
+ logger.Emit(LogLevel::Trace, "first");
+ CHECK(TestPrints::Read(instance).empty());
+
+ SECTION("create log within log limit") {
+ logger.Emit(LogLevel::Info, "second");
+ auto prints = TestPrints::Read(instance);
+ REQUIRE(prints.size() == 1);
+ CHECK(prints[0] == "INFO (OwnSinkLogger): second");
+
+ SECTION("increase log limit create log within log limit") {
+ logger.SetLogLimit(LogLevel::Trace);
+ logger.Emit(LogLevel::Trace, "third");
+ auto prints = TestPrints::Read(instance);
+ REQUIRE(prints.size() == 2);
+ CHECK(prints[1] == "TRACE (OwnSinkLogger): third");
+
+ SECTION("log via lambda function") {
+ logger.Emit(LogLevel::Trace,
+ [] { return std::string{"forth"}; });
+ auto prints = TestPrints::Read(instance);
+ REQUIRE(prints.size() == 3);
+ CHECK(prints[2] == "TRACE (OwnSinkLogger): forth");
+ }
+ }
+ }
+}
+
+TEST_CASE_METHOD(TwoGlobalSinksFixture,
+ "Global static logger with two sinks",
+ "[logger]") {
+ // logs should be forwarded to sink instances: 0 and 1
+ int instance1 = 0;
+ int instance2 = 1;
+
+ // create log outside of log limit
+ Logger::Log(LogLevel::Trace, "first");
+ CHECK(TestPrints::Read(instance1).empty());
+ CHECK(TestPrints::Read(instance2).empty());
+
+ SECTION("create log within log limit") {
+ Logger::Log(LogLevel::Info, "second");
+ auto prints1 = TestPrints::Read(instance1);
+ auto prints2 = TestPrints::Read(instance2);
+ REQUIRE(prints1.size() == 1);
+ REQUIRE(prints2.size() == 1);
+ CHECK(prints1[0] == "INFO: second");
+ CHECK(prints2[0] == "INFO: second");
+
+ SECTION("increase log limit create log within log limit") {
+ LogConfig::SetLogLimit(LogLevel::Trace);
+ Logger::Log(LogLevel::Trace, "third");
+ auto prints1 = TestPrints::Read(instance1);
+ auto prints2 = TestPrints::Read(instance2);
+ REQUIRE(prints1.size() == 2);
+ REQUIRE(prints2.size() == 2);
+ CHECK(prints1[1] == "TRACE: third");
+ CHECK(prints2[1] == "TRACE: third");
+
+ SECTION("log via lambda function") {
+ Logger::Log(LogLevel::Trace,
+ [] { return std::string{"forth"}; });
+ auto prints1 = TestPrints::Read(instance1);
+ auto prints2 = TestPrints::Read(instance2);
+ REQUIRE(prints1.size() == 3);
+ REQUIRE(prints2.size() == 3);
+ CHECK(prints1[2] == "TRACE: forth");
+ CHECK(prints2[2] == "TRACE: forth");
+ }
+ }
+ }
+}
+
+TEST_CASE_METHOD(TwoGlobalSinksFixture,
+ "Local named logger using two global sinks",
+ "[logger]") {
+ // create logger with sink instances from global LogConfig
+ Logger logger("TestLogger");
+
+ // logs should be forwarded to same sink instances: 0 and 1
+ int instance1 = 0;
+ int instance2 = 1;
+
+ // create log outside of log limit
+ logger.Emit(LogLevel::Trace, "first");
+ CHECK(TestPrints::Read(instance1).empty());
+ CHECK(TestPrints::Read(instance2).empty());
+
+ SECTION("create log within log limit") {
+ logger.Emit(LogLevel::Info, "second");
+ auto prints1 = TestPrints::Read(instance1);
+ auto prints2 = TestPrints::Read(instance2);
+ REQUIRE(prints1.size() == 1);
+ REQUIRE(prints2.size() == 1);
+ CHECK(prints1[0] == "INFO (TestLogger): second");
+ CHECK(prints2[0] == "INFO (TestLogger): second");
+
+ SECTION("increase log limit create log within log limit") {
+ logger.SetLogLimit(LogLevel::Trace);
+ logger.Emit(LogLevel::Trace, "third");
+ auto prints1 = TestPrints::Read(instance1);
+ auto prints2 = TestPrints::Read(instance2);
+ REQUIRE(prints1.size() == 2);
+ REQUIRE(prints2.size() == 2);
+ CHECK(prints1[1] == "TRACE (TestLogger): third");
+ CHECK(prints2[1] == "TRACE (TestLogger): third");
+
+ SECTION("log via lambda function") {
+ logger.Emit(LogLevel::Trace,
+ [] { return std::string{"forth"}; });
+ auto prints1 = TestPrints::Read(instance1);
+ auto prints2 = TestPrints::Read(instance2);
+ REQUIRE(prints1.size() == 3);
+ REQUIRE(prints2.size() == 3);
+ CHECK(prints1[2] == "TRACE (TestLogger): forth");
+ CHECK(prints2[2] == "TRACE (TestLogger): forth");
+ }
+ }
+ }
+}
+
+TEST_CASE_METHOD(TwoGlobalSinksFixture,
+ "Local named logger with its own two sink instances",
+ "[logger]") {
+ // create logger with separate sink instances
+ Logger logger("OwnSinkLogger",
+ {LogSinkTest::CreateFactory(), LogSinkTest::CreateFactory()});
+
+ // logs should be forwarded to new sink instances: 2 and 3
+ int instance1 = 2;
+ int instance2 = 3;
+
+ // create log outside of log limit
+ logger.Emit(LogLevel::Trace, "first");
+ CHECK(TestPrints::Read(instance1).empty());
+ CHECK(TestPrints::Read(instance2).empty());
+
+ SECTION("create log within log limit") {
+ logger.Emit(LogLevel::Info, "second");
+ auto prints1 = TestPrints::Read(instance1);
+ auto prints2 = TestPrints::Read(instance2);
+ REQUIRE(prints1.size() == 1);
+ REQUIRE(prints2.size() == 1);
+ CHECK(prints1[0] == "INFO (OwnSinkLogger): second");
+ CHECK(prints2[0] == "INFO (OwnSinkLogger): second");
+
+ SECTION("increase log limit create log within log limit") {
+ logger.SetLogLimit(LogLevel::Trace);
+ logger.Emit(LogLevel::Trace, "third");
+ auto prints1 = TestPrints::Read(instance1);
+ auto prints2 = TestPrints::Read(instance2);
+ REQUIRE(prints1.size() == 2);
+ REQUIRE(prints2.size() == 2);
+ CHECK(prints1[1] == "TRACE (OwnSinkLogger): third");
+ CHECK(prints2[1] == "TRACE (OwnSinkLogger): third");
+
+ SECTION("log via lambda function") {
+ logger.Emit(LogLevel::Trace,
+ [] { return std::string{"forth"}; });
+ auto prints1 = TestPrints::Read(instance1);
+ auto prints2 = TestPrints::Read(instance2);
+ REQUIRE(prints1.size() == 3);
+ REQUIRE(prints2.size() == 3);
+ CHECK(prints1[2] == "TRACE (OwnSinkLogger): forth");
+ CHECK(prints2[2] == "TRACE (OwnSinkLogger): forth");
+ }
+ }
+ }
+}
diff --git a/test/buildtool/multithreading/TARGETS b/test/buildtool/multithreading/TARGETS
new file mode 100644
index 00000000..09fafaa8
--- /dev/null
+++ b/test/buildtool/multithreading/TARGETS
@@ -0,0 +1,76 @@
+{ "task":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["task"]
+ , "srcs": ["task.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["src/buildtool/multithreading", "task_system"]
+ ]
+ , "stage": ["test", "buildtool", "multithreading"]
+ }
+, "task_system":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["task_system"]
+ , "srcs": ["task_system.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["test/utils", "container_matchers"]
+ , ["src/buildtool/multithreading", "task_system"]
+ ]
+ , "stage": ["test", "buildtool", "multithreading"]
+ }
+, "async_map_node":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["async_map_node"]
+ , "srcs": ["async_map_node.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["test/utils", "container_matchers"]
+ , ["src/buildtool/multithreading", "async_map_node"]
+ , ["src/buildtool/multithreading", "task_system"]
+ ]
+ , "stage": ["test", "buildtool", "multithreading"]
+ }
+, "async_map":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["async_map"]
+ , "srcs": ["async_map.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["test/utils", "container_matchers"]
+ , ["src/buildtool/multithreading", "async_map"]
+ , ["src/buildtool/multithreading", "async_map_node"]
+ , ["src/buildtool/multithreading", "task_system"]
+ ]
+ , "stage": ["test", "buildtool", "multithreading"]
+ }
+, "async_map_consumer":
+ { "type": ["@", "rules", "CC/test", "test"]
+ , "name": ["async_map_consumer"]
+ , "srcs": ["async_map_consumer.test.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["test", "catch-main"]
+ , ["test/utils", "container_matchers"]
+ , ["src/buildtool/multithreading", "async_map_consumer"]
+ , ["src/buildtool/multithreading", "async_map"]
+ , ["src/buildtool/multithreading", "task_system"]
+ ]
+ , "stage": ["test", "buildtool", "multithreading"]
+ }
+, "TESTS":
+ { "type": "install"
+ , "tainted": ["test"]
+ , "deps":
+ [ "async_map"
+ , "async_map_consumer"
+ , "async_map_node"
+ , "task"
+ , "task_system"
+ ]
+ }
+} \ No newline at end of file
diff --git a/test/buildtool/multithreading/async_map.test.cpp b/test/buildtool/multithreading/async_map.test.cpp
new file mode 100644
index 00000000..bac7f031
--- /dev/null
+++ b/test/buildtool/multithreading/async_map.test.cpp
@@ -0,0 +1,58 @@
+#include <string>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/multithreading/async_map.hpp"
+#include "src/buildtool/multithreading/async_map_node.hpp"
+#include "src/buildtool/multithreading/task_system.hpp"
+
+TEST_CASE("Single-threaded: nodes only created once", "[async_map]") {
+ AsyncMap<std::string, int> map;
+ auto* key_node = map.GetOrCreateNode("key");
+ CHECK(key_node != nullptr);
+
+ auto* other_node = map.GetOrCreateNode("otherkey");
+ CHECK(other_node != nullptr);
+
+ auto* should_be_key_node = map.GetOrCreateNode("key");
+ CHECK(should_be_key_node != nullptr);
+
+ CHECK(key_node != other_node);
+ CHECK(key_node == should_be_key_node);
+}
+
+TEST_CASE("Nodes only created once and survive the map destruction",
+ "[async_map]") {
+
+ using NodePtr = typename AsyncMap<std::string, int>::NodePtr;
+ NodePtr key_node{nullptr};
+ NodePtr other_node{nullptr};
+ NodePtr should_be_key_node{nullptr};
+ {
+ AsyncMap<std::string, int> map;
+ {
+ TaskSystem ts;
+ ts.QueueTask([&key_node, &map]() {
+ auto* node = map.GetOrCreateNode("key");
+ CHECK(node != nullptr);
+ key_node = node;
+ });
+
+ ts.QueueTask([&other_node, &map]() {
+ auto* node = map.GetOrCreateNode("otherkey");
+ CHECK(node != nullptr);
+ other_node = node;
+ });
+
+ ts.QueueTask([&should_be_key_node, &map]() {
+ auto* node = map.GetOrCreateNode("key");
+ CHECK(node != nullptr);
+ should_be_key_node = node;
+ });
+ }
+ }
+ CHECK(key_node != nullptr);
+ CHECK(other_node != nullptr);
+ CHECK(should_be_key_node != nullptr);
+ CHECK(key_node != other_node);
+ CHECK(key_node == should_be_key_node);
+}
diff --git a/test/buildtool/multithreading/async_map_consumer.test.cpp b/test/buildtool/multithreading/async_map_consumer.test.cpp
new file mode 100644
index 00000000..5edaeec0
--- /dev/null
+++ b/test/buildtool/multithreading/async_map_consumer.test.cpp
@@ -0,0 +1,309 @@
+#include <cstdint> // for fixed width integral types
+#include <numeric>
+#include <string>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/multithreading/async_map.hpp"
+#include "src/buildtool/multithreading/async_map_consumer.hpp"
+#include "src/buildtool/multithreading/task_system.hpp"
+
+auto FibonacciMapConsumer() -> AsyncMapConsumer<int, uint64_t> {
+ auto value_creator = [](auto /*unused*/,
+ auto setter,
+ auto logger,
+ auto subcaller,
+ auto const& key) {
+ if (key < 0) {
+ (*logger)("index needs to be non-negative", true);
+ return;
+ }
+ if (key < 2) {
+ (*setter)(uint64_t{static_cast<uint64_t>(key)});
+ return;
+ }
+ (*subcaller)(
+ std::vector<int>{key - 2, key - 1},
+ [setter](auto const& values) {
+ (*setter)(*values[0] + *values[1]);
+ },
+ logger);
+ };
+ return AsyncMapConsumer<int, uint64_t>{value_creator};
+}
+
+auto FibOnEvenConsumer() -> AsyncMapConsumer<int, uint64_t> {
+ auto value_creator = [](auto /*unused*/,
+ auto setter,
+ auto logger,
+ auto subcaller,
+ auto const& key) {
+ if (key < 0) {
+ (*logger)("index needs to be non-negative (and actually even)",
+ true);
+ return;
+ }
+ if (key == 0) {
+ (*setter)(uint64_t{static_cast<uint64_t>(0)});
+ return;
+ }
+ if (key == 2) {
+ (*setter)(uint64_t{static_cast<uint64_t>(1)});
+ return;
+ }
+ (*subcaller)(
+ std::vector<int>{key - 4, key - 2},
+ [setter](auto const& values) {
+ (*setter)(*values[0] + *values[1]);
+ },
+ logger);
+ };
+ return AsyncMapConsumer<int, uint64_t>{value_creator};
+}
+
+auto CountToMaxConsumer(int max_val, int step = 1, bool cycle = false)
+ -> AsyncMapConsumer<int, uint64_t> {
+ auto value_creator = [max_val, step, cycle](auto /*unused*/,
+ auto setter,
+ auto logger,
+ auto subcaller,
+ auto const& key) {
+ if (key < 0 or key > max_val) { // intentional bug: non-fatal abort
+ (*logger)("index out of range", false);
+ return;
+ }
+ if (key == max_val) { // will never be reached if cycle==true
+ (*setter)(uint64_t{static_cast<uint64_t>(key)});
+ return;
+ }
+ auto next = key + step;
+ if (cycle) {
+ next %= max_val;
+ }
+ (*subcaller)(
+ {next},
+ [setter](auto const& values) { (*setter)(uint64_t{*values[0]}); },
+ logger);
+ };
+ return AsyncMapConsumer<int, uint64_t>{value_creator};
+}
+
+TEST_CASE("Fibonacci", "[async_map_consumer]") {
+ uint64_t result{};
+ int const index{92};
+ bool execution_failed = false;
+ uint64_t const expected_result{7540113804746346429};
+ auto mapconsumer = FibonacciMapConsumer();
+ {
+ TaskSystem ts;
+
+ mapconsumer.ConsumeAfterKeysReady(
+ &ts,
+ {index},
+ [&result](auto const& values) { result = *values[0]; },
+ [&execution_failed](std::string const& /*unused*/,
+ bool /*unused*/) { execution_failed = true; });
+ }
+ CHECK(not execution_failed);
+ CHECK(result == expected_result);
+}
+
+TEST_CASE("Values only used once nodes are marked ready",
+ "[async_map_consumer]") {
+ AsyncMapConsumer<int, bool> consume_when_ready{[](auto /*unused*/,
+ auto setter,
+ auto logger,
+ auto subcaller,
+ auto const& key) {
+ if (key == 0) {
+ (*setter)(true);
+ return;
+ }
+ (*subcaller)(
+ {key - 1},
+ [setter, logger, key](auto const& values) {
+ auto const ready_when_used = values[0];
+ if (not ready_when_used) {
+ (*logger)(std::to_string(key), true);
+ }
+ (*setter)(true);
+ },
+ logger);
+ }};
+ std::vector<std::string> value_used_before_ready{};
+ std::mutex vectorm;
+ bool final_value{false};
+ int const starting_index = 100;
+ {
+ TaskSystem ts;
+
+ consume_when_ready.ConsumeAfterKeysReady(
+ &ts,
+ {starting_index},
+ [&final_value](auto const& values) { final_value = values[0]; },
+ [&value_used_before_ready, &vectorm](std::string const& key,
+ bool /*unused*/) {
+ std::unique_lock l{vectorm};
+ value_used_before_ready.push_back(key);
+ });
+ }
+ CHECK(value_used_before_ready.empty());
+ CHECK(final_value);
+}
+
+TEST_CASE("No subcalling necessary", "[async_map_consumer]") {
+ AsyncMapConsumer<int, int> identity{
+ [](auto /*unused*/,
+ auto setter,
+ [[maybe_unused]] auto logger,
+ [[maybe_unused]] auto subcaller,
+ auto const& key) { (*setter)(int{key}); }};
+ std::vector<int> final_values{};
+ std::vector<int> const keys{1, 23, 4};
+ {
+ TaskSystem ts;
+ identity.ConsumeAfterKeysReady(
+ &ts,
+ keys,
+ [&final_values](auto const& values) {
+ std::transform(values.begin(),
+ values.end(),
+ std::back_inserter(final_values),
+ [](auto* val) { return *val; });
+ },
+ [](std::string const& /*unused*/, bool /*unused*/) {});
+ }
+ CHECK(keys == final_values);
+}
+
+TEST_CASE("FibOnEven", "[async_map_consumer]") {
+ uint64_t result{};
+ int const index{184};
+ bool execution_failed = false;
+ uint64_t const expected_result{7540113804746346429};
+ auto mapconsumer = FibOnEvenConsumer();
+ {
+ TaskSystem ts;
+
+ mapconsumer.ConsumeAfterKeysReady(
+ &ts,
+ {index},
+ [&result](auto const& values) { result = *values[0]; },
+ [&execution_failed](std::string const& /*unused*/,
+ bool /*unused*/) { execution_failed = true; });
+ }
+ CHECK(not execution_failed);
+ CHECK(result == expected_result);
+}
+
+TEST_CASE("ErrorPropagation", "[async_map_consumer]") {
+ int const index{183}; // Odd number, will fail
+ bool execution_failed = false;
+ bool consumer_called = false;
+ std::atomic<int> fail_cont_counter{0};
+ auto mapconsumer = FibOnEvenConsumer();
+ {
+ TaskSystem ts;
+
+ mapconsumer.ConsumeAfterKeysReady(
+ &ts,
+ {index},
+ [&consumer_called](auto const& /*unused*/) {
+ consumer_called = true;
+ },
+ [&execution_failed](std::string const& /*unused*/,
+ bool /*unused*/) { execution_failed = true; },
+ [&fail_cont_counter]() { fail_cont_counter++; });
+ }
+ CHECK(execution_failed);
+ CHECK(!consumer_called);
+ CHECK(fail_cont_counter == 1);
+}
+
+TEST_CASE("Failure detection", "[async_map_consumer]") {
+ int const kMaxVal = 1000; // NOLINT
+ std::optional<int> value{std::nullopt};
+ bool failed{};
+
+ SECTION("Unfinished pending keys") {
+ int const kStep{3};
+ REQUIRE(std::lcm(kMaxVal, kStep) > kMaxVal);
+ auto map = CountToMaxConsumer(kMaxVal, kStep);
+ {
+ TaskSystem ts;
+ map.ConsumeAfterKeysReady(
+ &ts,
+ {0},
+ [&value](auto const& values) { value = *values[0]; },
+ [&failed](std::string const& /*unused*/, bool fatal) {
+ failed = failed or fatal;
+ });
+ }
+ CHECK_FALSE(value);
+ CHECK_FALSE(failed);
+ CHECK_FALSE(map.DetectCycle());
+
+ auto const pending = map.GetPendingKeys();
+ CHECK_FALSE(pending.empty());
+
+ std::vector<int> expected{};
+ expected.reserve(kMaxVal + 1);
+ for (int i = 0; i < kMaxVal + kStep; i += kStep) {
+ expected.emplace_back(i);
+ }
+ CHECK_THAT(pending, Catch::Matchers::UnorderedEquals(expected));
+ }
+
+ SECTION("Cycle containing all unfinished keys") {
+ auto map = CountToMaxConsumer(kMaxVal, 1, /*cycle=*/true);
+ {
+ TaskSystem ts;
+ map.ConsumeAfterKeysReady(
+ &ts,
+ {0},
+ [&value](auto const& values) { value = *values[0]; },
+ [&failed](std::string const& /*unused*/, bool fatal) {
+ failed = failed or fatal;
+ });
+ }
+ CHECK_FALSE(value);
+ CHECK_FALSE(failed);
+
+ auto const pending = map.GetPendingKeys();
+ CHECK_FALSE(pending.empty());
+
+ auto const cycle = map.DetectCycle();
+ REQUIRE(cycle);
+
+ // pending contains all keys from cycle (except last duplicate key)
+ CHECK_THAT(pending,
+ Catch::Matchers::UnorderedEquals<int>(
+ {cycle->begin(), cycle->end() - 1}));
+
+ // cycle contains keys in correct order
+ std::vector<int> expected{};
+ expected.reserve(kMaxVal + 1);
+ for (int i = cycle->at(0); i < cycle->at(0) + kMaxVal + 1; ++i) {
+ expected.emplace_back(i % kMaxVal);
+ }
+ CHECK_THAT(*cycle, Catch::Matchers::Equals(expected));
+ }
+
+ SECTION("No cycle and no unfinished keys") {
+ auto map = CountToMaxConsumer(kMaxVal);
+ {
+ TaskSystem ts;
+ map.ConsumeAfterKeysReady(
+ &ts,
+ {0},
+ [&value](auto const& values) { value = *values[0]; },
+ [&failed](std::string const& /*unused*/, bool fatal) {
+ failed = failed or fatal;
+ });
+ }
+ REQUIRE(value);
+ CHECK(*value == kMaxVal);
+ CHECK_FALSE(failed);
+ CHECK_FALSE(map.DetectCycle());
+ CHECK(map.GetPendingKeys().empty());
+ }
+}
diff --git a/test/buildtool/multithreading/async_map_node.test.cpp b/test/buildtool/multithreading/async_map_node.test.cpp
new file mode 100644
index 00000000..9377e7f2
--- /dev/null
+++ b/test/buildtool/multithreading/async_map_node.test.cpp
@@ -0,0 +1,93 @@
+#include <mutex>
+#include <string>
+#include <thread>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/multithreading/async_map_node.hpp"
+#include "src/buildtool/multithreading/task_system.hpp"
+
+TEST_CASE("No task is queued if the node is never ready", "[async_map_node]") {
+ std::vector<int> tasks;
+ std::mutex m;
+ AsyncMapNode<int, bool> node_never_ready{0};
+ {
+ TaskSystem ts;
+ CHECK_FALSE(
+ node_never_ready.AddOrQueueAwaitingTask(&ts, [&tasks, &m]() {
+ std::unique_lock l{m};
+ // NOLINTNEXTLINE(readability-magic-numbers,cppcoreguidelines-avoid-magic-numbers)
+ tasks.push_back(0);
+ }));
+ CHECK_FALSE(
+ node_never_ready.AddOrQueueAwaitingTask(&ts, [&tasks, &m]() {
+ std::unique_lock l{m};
+ // NOLINTNEXTLINE(readability-magic-numbers,cppcoreguidelines-avoid-magic-numbers)
+ tasks.push_back(1);
+ }));
+ CHECK_FALSE(
+ node_never_ready.AddOrQueueAwaitingTask(&ts, [&tasks, &m]() {
+ std::unique_lock l{m};
+ // NOLINTNEXTLINE(readability-magic-numbers,cppcoreguidelines-avoid-magic-numbers)
+ tasks.push_back(2);
+ }));
+ }
+ CHECK(tasks.empty());
+}
+
+TEST_CASE("Value is set correctly", "[async_map_node]") {
+ AsyncMapNode<int, bool> node{0};
+ {
+ TaskSystem ts;
+ node.SetAndQueueAwaitingTasks(&ts, true);
+ }
+ CHECK(node.GetValue());
+}
+
+TEST_CASE("Tasks are queued correctly", "[async_map_node]") {
+ AsyncMapNode<int, std::string> node{0};
+ std::vector<int> tasks;
+ std::mutex m;
+ {
+ TaskSystem ts;
+ CHECK_FALSE(node.AddOrQueueAwaitingTask(&ts, [&tasks, &m]() {
+ std::unique_lock l{m};
+ // NOLINTNEXTLINE(readability-magic-numbers,cppcoreguidelines-avoid-magic-numbers)
+ tasks.push_back(0);
+ }));
+ CHECK_FALSE(node.AddOrQueueAwaitingTask(&ts, [&tasks, &m]() {
+ std::unique_lock l{m};
+ // NOLINTNEXTLINE(readability-magic-numbers,cppcoreguidelines-avoid-magic-numbers)
+ tasks.push_back(1);
+ }));
+ CHECK_FALSE(node.AddOrQueueAwaitingTask(&ts, [&tasks, &m]() {
+ std::unique_lock l{m};
+ // NOLINTNEXTLINE(readability-magic-numbers,cppcoreguidelines-avoid-magic-numbers)
+ tasks.push_back(2);
+ }));
+
+ {
+ std::unique_lock l{m};
+ CHECK(tasks.empty());
+ }
+ node.SetAndQueueAwaitingTasks(&ts, "ready");
+ CHECK(node.AddOrQueueAwaitingTask(&ts, [&tasks, &m]() {
+ std::unique_lock l{m};
+ // NOLINTNEXTLINE(readability-magic-numbers,cppcoreguidelines-avoid-magic-numbers)
+ tasks.push_back(3);
+ }));
+ CHECK(node.AddOrQueueAwaitingTask(&ts, [&tasks, &m]() {
+ std::unique_lock l{m};
+ // NOLINTNEXTLINE(readability-magic-numbers,cppcoreguidelines-avoid-magic-numbers)
+ tasks.push_back(4);
+ }));
+ CHECK(node.AddOrQueueAwaitingTask(&ts, [&tasks, &m]() {
+ std::unique_lock l{m};
+ // NOLINTNEXTLINE(readability-magic-numbers,cppcoreguidelines-avoid-magic-numbers)
+ tasks.push_back(5);
+ }));
+ }
+ CHECK(node.GetValue() == "ready");
+ CHECK_THAT(
+ tasks,
+ Catch::Matchers::UnorderedEquals(std::vector<int>{0, 1, 2, 3, 4, 5}));
+}
diff --git a/test/buildtool/multithreading/task.test.cpp b/test/buildtool/multithreading/task.test.cpp
new file mode 100644
index 00000000..40d641c3
--- /dev/null
+++ b/test/buildtool/multithreading/task.test.cpp
@@ -0,0 +1,328 @@
+#include "catch2/catch.hpp"
+#include "src/buildtool/multithreading/task.hpp"
+
+namespace {
+
+struct StatelessCallable {
+ void operator()() noexcept {}
+};
+
+struct ValueCaptureCallable {
+ explicit ValueCaptureCallable(int i) noexcept : number{i} {}
+
+ // NOLINTNEXTLINE
+ void operator()() noexcept { number += 5; }
+
+ int number;
+};
+
+struct RefCaptureCallable {
+ // NOLINTNEXTLINE(google-runtime-references)
+ explicit RefCaptureCallable(int& i) noexcept : number{i} {}
+
+ // NOLINTNEXTLINE
+ void operator()() noexcept { number += 3; }
+
+ int& number;
+};
+
+} // namespace
+
+TEST_CASE("Default constructed task is empty", "[task]") {
+ Task t;
+ CHECK(!t);
+ CHECK(!(Task()));
+ CHECK(!(Task{}));
+}
+
+TEST_CASE("Task constructed from empty function is empty", "[task]") {
+ std::function<void()> empty_function;
+ Task t_from_empty_function{empty_function};
+
+ CHECK(!Task(std::function<void()>{}));
+ CHECK(!Task(empty_function));
+ CHECK(!t_from_empty_function);
+}
+
+TEST_CASE("Task constructed from user defined callable object is not empty",
+ "[task]") {
+ SECTION("Stateless struct") {
+ Task t{StatelessCallable{}};
+ StatelessCallable callable;
+ Task t_from_named_callable{callable};
+
+ CHECK(Task{StatelessCallable{}});
+ CHECK(Task{callable});
+ CHECK(t);
+ CHECK(t_from_named_callable);
+ }
+
+ SECTION("Statefull struct") {
+ SECTION("Reference capture") {
+ int a = 2;
+ Task t_ref{RefCaptureCallable{a}};
+ RefCaptureCallable three_adder{a};
+ Task t_from_named_callable_ref_capture{three_adder};
+
+ CHECK(Task{RefCaptureCallable{a}});
+ CHECK(Task{three_adder});
+ CHECK(t_ref);
+ CHECK(t_from_named_callable_ref_capture);
+ }
+
+ SECTION("Value capture") {
+ Task t_value{ValueCaptureCallable{1}};
+ ValueCaptureCallable callable{2};
+ Task t_from_named_callable_value_capture{callable};
+
+ CHECK(Task{ValueCaptureCallable{3}});
+ CHECK(Task{callable});
+ CHECK(t_value);
+ CHECK(t_from_named_callable_value_capture);
+ }
+ }
+}
+
+TEST_CASE("Task constructed from lambda is not empty", "[task]") {
+ SECTION("Stateless lambda") {
+ Task t{[]() {}};
+ auto callable = []() {};
+ Task t_from_named_callable{callable};
+
+ CHECK(Task{[]() {}});
+ CHECK(Task{callable});
+ CHECK(t);
+ CHECK(t_from_named_callable);
+ }
+
+ SECTION("Statefull lambda") {
+ SECTION("Reference capture") {
+ int a = 2;
+ Task t_ref{[&a]() { a += 3; }};
+ auto lambda = [&a]() { a += 3; };
+ Task t_from_named_lambda_ref_capture{lambda};
+
+ CHECK(Task{[&a]() { a += 3; }});
+ CHECK(Task{lambda});
+ CHECK(t_ref);
+ CHECK(t_from_named_lambda_ref_capture);
+ }
+
+ SECTION("Value capture") {
+ int a = 1;
+ // NOLINTNEXTLINE
+ Task t_value{[num = a]() mutable { num += 5; }};
+ // NOLINTNEXTLINE
+ auto lambda = [num = a]() mutable { num += 5; };
+ Task t_from_named_lambda_value_capture{lambda};
+
+ CHECK(Task{[num = a]() mutable { num += 5; }});
+ CHECK(Task{lambda});
+ CHECK(t_value);
+ CHECK(t_from_named_lambda_value_capture);
+ }
+ }
+}
+
+TEST_CASE("Task can be executed and doesn't steal contents", "[task]") {
+ SECTION("User defined object") {
+ SECTION("Value capture") {
+ int const initial_value = 2;
+ int num = initial_value;
+ ValueCaptureCallable add_five{num};
+ Task t_add_five{add_five};
+ CHECK(add_five.number == initial_value);
+ t_add_five();
+
+ // Internal data has been copied once again to the Task, so what is
+ // modified in the call to the task op() is not the data we can
+ // observe from the struct we created (add_five.number)
+ CHECK(add_five.number == initial_value);
+ CHECK(num == initial_value);
+ }
+ SECTION("Reference capture") {
+ int const initial_value = 2;
+ int num = initial_value;
+ RefCaptureCallable add_three{num};
+ Task t_add_three{add_three};
+ CHECK(add_three.number == initial_value);
+ t_add_three();
+
+ // In this case, data modified by the task is the same than the one
+ // in the struct, so we can observe the change
+ CHECK(add_three.number == initial_value + 3);
+ CHECK(&num == &add_three.number);
+ }
+ }
+
+ SECTION("Anonymous lambda function") {
+ SECTION("Value capture") {
+ int const initial_value = 2;
+ int num = initial_value;
+ // NOLINTNEXTLINE
+ Task t_add_five{[a = num]() mutable { a += 5; }};
+ t_add_five();
+
+ // Internal data can not be observed, external data does not change
+ CHECK(num == initial_value);
+ }
+ SECTION("Reference capture") {
+ int const initial_value = 2;
+ int num = initial_value;
+ // NOLINTNEXTLINE
+ Task t_add_three{[&num]() { num += 3; }};
+ t_add_three();
+
+ // Internal data can not be observed, external data changes
+ CHECK(num == initial_value + 3);
+ }
+ }
+
+ SECTION("Named lambda function") {
+ SECTION("Value capture") {
+ int const initial_value = 2;
+ int num = initial_value;
+ // NOLINTNEXTLINE
+ auto add_five = [a = num]() mutable { a += 5; };
+ Task t_add_five{add_five};
+ t_add_five();
+
+ // Internal data can not be observed, external data does not change
+ CHECK(num == initial_value);
+ // Lambda can be still called (we can't observe side effects)
+ add_five();
+ }
+ SECTION("Reference capture") {
+ int const initial_value = 2;
+ int num = initial_value;
+ // NOLINTNEXTLINE
+ auto add_three = [&num]() { num += 3; };
+ Task t_add_three{add_three};
+ t_add_three();
+
+ // Internal data can not be observed, external data changes
+ CHECK(num == initial_value + 3);
+ // Lambda can be still called (and side effects are as expected)
+ add_three();
+ CHECK(num == initial_value + 6);
+ }
+ }
+
+ SECTION("std::function") {
+ SECTION("Value capture") {
+ int const initial_value = 2;
+ int num = initial_value;
+ // NOLINTNEXTLINE
+ std::function<void()> add_five{[a = num]() mutable { a += 5; }};
+ Task t_add_five{add_five};
+ t_add_five();
+
+ // Internal data can not be observed, external data does not change
+ CHECK(num == initial_value);
+ // Original function still valid (side effects not observable)
+ CHECK(add_five);
+ add_five();
+ }
+ SECTION("Reference capture") {
+ int const initial_value = 2;
+ int num = initial_value;
+ // NOLINTNEXTLINE
+ std::function<void()> add_three{[&num]() { num += 3; }};
+ Task t_add_three{add_three};
+ t_add_three();
+
+ // Internal data can not be observed, external data changes
+ CHECK(num == initial_value + 3);
+ // Original function still valid (and side effects are as expected)
+ CHECK(add_three);
+ add_three();
+ CHECK(num == initial_value + 6);
+ }
+ }
+}
+
+TEST_CASE("Task moving from named object can be executed", "[task]") {
+ // Constructing Tasks from named objects using Task{std::move(named_object)}
+ // is only a way to explicitely express that the constructor from Task that
+ // will be called will treat `named_object` as an rvalue (temporary object).
+ // We could accomplish the same by using `Task t{Type{args}};` where `Type`
+ // is the type of the callable object.
+ SECTION("User defined object") {
+ SECTION("Value capture") {
+ int const initial_value = 2;
+ int num = initial_value;
+ ValueCaptureCallable add_five{num};
+ // NOLINTNEXTLINE
+ Task t_add_five{std::move(add_five)};
+ t_add_five();
+
+ // No observable side effects
+ CHECK(num == initial_value);
+ }
+ SECTION("Reference capture") {
+ int const initial_value = 2;
+ int num = initial_value;
+ RefCaptureCallable add_three{num};
+ // NOLINTNEXTLINE
+ Task t_add_three{std::move(add_three)};
+ t_add_three();
+
+ // External data must have been affected by side effect but in this
+ // case `add_three` is a moved-from object so there is no guarrantee
+ // about the data it holds
+ CHECK(num == initial_value + 3);
+ }
+ }
+
+ // Note that for anonymous lambdas the move constructor of Task is the one
+ // that has already been tested
+ SECTION("Named lambda function") {
+ SECTION("Value capture") {
+ int const initial_value = 2;
+ int num = initial_value;
+ // NOLINTNEXTLINE
+ auto add_five = [a = num]() mutable { a += 5; };
+ Task t_add_five{std::move(add_five)};
+ t_add_five();
+
+ // Internal data can not be observed, external data does not change
+ CHECK(num == initial_value);
+ }
+ SECTION("Reference capture") {
+ int const initial_value = 2;
+ int num = initial_value;
+ // NOLINTNEXTLINE
+ auto add_three = [&num]() { num += 3; };
+ Task t_add_three{std::move(add_three)};
+ t_add_three();
+
+ // Internal data can not be observed, external data changes
+ CHECK(num == initial_value + 3);
+ }
+ }
+
+ SECTION("std::function") {
+ SECTION("Value capture") {
+ int const initial_value = 2;
+ int num = initial_value;
+ // NOLINTNEXTLINE
+ std::function<void()> add_five{[a = num]() mutable { a += 5; }};
+ Task t_add_five{std::move(add_five)};
+ t_add_five();
+
+ // Internal data can not be observed, external data does not change
+ CHECK(num == initial_value);
+ }
+ SECTION("Reference capture") {
+ int const initial_value = 2;
+ int num = initial_value;
+ // NOLINTNEXTLINE
+ std::function<void()> add_three{[&num]() { num += 3; }};
+ Task t_add_three{std::move(add_three)};
+ t_add_three();
+
+ // Internal data can not be observed, external data changes
+ CHECK(num == initial_value + 3);
+ }
+ }
+}
diff --git a/test/buildtool/multithreading/task_system.test.cpp b/test/buildtool/multithreading/task_system.test.cpp
new file mode 100644
index 00000000..8488417c
--- /dev/null
+++ b/test/buildtool/multithreading/task_system.test.cpp
@@ -0,0 +1,225 @@
+#include <chrono>
+#include <mutex>
+#include <numeric> // std::iota
+#include <string>
+#include <thread>
+#include <unordered_set>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/multithreading/task_system.hpp"
+#include "test/utils/container_matchers.hpp"
+
+namespace {
+
+enum class CallStatus { kNotExecuted, kExecuted };
+
+} // namespace
+
+TEST_CASE("Basic", "[task_system]") {
+ SECTION("Empty task system terminates") {
+ { TaskSystem ts; }
+ CHECK(true);
+ }
+ SECTION("0-arguments constructor") {
+ TaskSystem ts;
+ CHECK(ts.NumberOfThreads() == std::thread::hardware_concurrency());
+ }
+ SECTION("1-argument constructor") {
+ std::size_t const desired_number_of_threads_in_ts =
+ GENERATE(1u, 2u, 5u, 10u, std::thread::hardware_concurrency());
+ TaskSystem ts(desired_number_of_threads_in_ts);
+ CHECK(ts.NumberOfThreads() == desired_number_of_threads_in_ts);
+ }
+}
+
+TEST_CASE("Side effects of tasks are reflected out of ts", "[task_system]") {
+ SECTION("Lambda function") {
+ auto status = CallStatus::kNotExecuted;
+ { // Make sure that all tasks will be completed before the checks
+ TaskSystem ts;
+ ts.QueueTask([&status]() { status = CallStatus::kExecuted; });
+ }
+ CHECK(status == CallStatus::kExecuted);
+ }
+ SECTION("std::function") {
+ auto status = CallStatus::kNotExecuted;
+ {
+ TaskSystem ts;
+ std::function<void()> f{
+ [&status]() { status = CallStatus::kExecuted; }};
+ ts.QueueTask(f);
+ }
+ CHECK(status == CallStatus::kExecuted);
+ }
+ SECTION("Struct") {
+ auto s = CallStatus::kNotExecuted;
+ struct Callable {
+ explicit Callable(CallStatus* cs) : status{cs} {}
+ void operator()() const { *status = CallStatus::kExecuted; }
+ CallStatus* status;
+ };
+ Callable c{&s};
+ {
+ TaskSystem ts;
+ ts.QueueTask(c);
+ }
+ CHECK(&s == c.status);
+ CHECK(s == CallStatus::kExecuted);
+ }
+ SECTION("Lambda capturing `this` inside struct") {
+ std::string ext_name{};
+ struct Wrapper {
+ TaskSystem ts{};
+ std::string name{};
+
+ explicit Wrapper(std::string n) : name{std::move(n)} {}
+
+ void QueueSetAndCheck(std::string* ext) {
+ ts.QueueTask([this, ext]() {
+ SetDefaultName();
+ CheckDefaultName(ext);
+ });
+ }
+
+ void SetDefaultName() { name = "Default"; }
+
+ void CheckDefaultName(std::string* ext) const {
+ *ext = name;
+ CHECK(name == "Default");
+ }
+ };
+ {
+ Wrapper w{"Non-default name"};
+ w.QueueSetAndCheck(&ext_name);
+ }
+ CHECK(ext_name == "Default");
+ }
+}
+
+TEST_CASE("All tasks are executed", "[task_system]") {
+ std::size_t const number_of_tasks = 1000;
+ std::vector<int> tasks_executed;
+ std::vector<int> queued_tasks(number_of_tasks);
+ std::iota(std::begin(queued_tasks), std::end(queued_tasks), 0);
+ std::mutex m;
+
+ {
+ TaskSystem ts;
+ for (auto task_num : queued_tasks) {
+ ts.QueueTask([&tasks_executed, &m, task_num]() {
+ std::unique_lock l{m};
+ tasks_executed.push_back(task_num);
+ });
+ }
+ }
+
+ CHECK_THAT(tasks_executed,
+ HasSameElementsAs<std::vector<int>>(queued_tasks));
+}
+
+TEST_CASE("Task is executed even if it needs to wait for a long while",
+ "[task_system]") {
+ auto status = CallStatus::kNotExecuted;
+
+ // Calculate what would take for the task system to be constructed, queue a
+ // non-sleeping task, execute it and be destructed
+ auto const start_no_sleep = std::chrono::high_resolution_clock::now();
+ {
+ TaskSystem ts;
+ ts.QueueTask([&status]() { status = CallStatus::kExecuted; });
+ }
+ auto const end_no_sleep = std::chrono::high_resolution_clock::now();
+
+ status = CallStatus::kNotExecuted;
+
+ std::chrono::nanoseconds const sleep_time =
+ 10 * std::chrono::duration_cast<std::chrono::nanoseconds>(
+ end_no_sleep - start_no_sleep);
+ auto const start = std::chrono::high_resolution_clock::now();
+ {
+ TaskSystem ts;
+ ts.QueueTask([&status, sleep_time]() {
+ std::this_thread::sleep_for(sleep_time);
+ status = CallStatus::kExecuted;
+ });
+ }
+ auto const end = std::chrono::high_resolution_clock::now();
+ CHECK(end - start > sleep_time);
+ CHECK(status == CallStatus::kExecuted);
+}
+
+TEST_CASE("All threads run until work is done", "[task_system]") {
+ using namespace std::chrono_literals;
+ static auto const kNumThreads = std::thread::hardware_concurrency();
+ static auto const kFailTimeout = 10s;
+
+ std::mutex mutex{};
+ std::condition_variable cv{};
+ std::unordered_set<std::thread::id> tids{};
+
+ // Add thread id to set and wait for others to do the same.
+ auto store_id = [&tids, &mutex, &cv]() -> void {
+ std::unique_lock lock(mutex);
+ tids.emplace(std::this_thread::get_id());
+ cv.notify_all();
+ cv.wait_for(
+ lock, kFailTimeout, [&tids] { return tids.size() == kNumThreads; });
+ };
+
+ SECTION("single task produces multiple tasks") {
+ {
+ TaskSystem ts{kNumThreads};
+ // Wait some time for all threads to go to sleep.
+ std::this_thread::sleep_for(1s);
+
+ // Run singe task that creates the actual store tasks. All threads
+ // should stay alive until their corresponding queue is filled.
+ ts.QueueTask([&ts, &store_id] {
+ // One task per thread (assumes round-robin push to queues).
+ for (std::size_t i{}; i < ts.NumberOfThreads(); ++i) {
+ ts.QueueTask([&store_id] { store_id(); });
+ }
+ });
+ }
+ CHECK(tids.size() == kNumThreads);
+ }
+
+ SECTION("multiple tasks reduce to one, which produces multiple tasks") {
+ atomic<std::size_t> counter{};
+
+ // All threads wait for counter, last thread creates 'store_id' tasks.
+ auto barrier = [&counter, &store_id](TaskSystem* ts) {
+ auto value = ++counter;
+ if (value == kNumThreads) {
+ counter.notify_all();
+
+ // Wait some time for other threads to go to sleep.
+ std::this_thread::sleep_for(1s);
+
+ // One task per thread (assumes round-robin push to queues).
+ for (std::size_t i{}; i < ts->NumberOfThreads(); ++i) {
+ ts->QueueTask([&store_id] { store_id(); });
+ }
+ }
+ else {
+ while (value != kNumThreads) {
+ counter.wait(value);
+ value = counter;
+ }
+ }
+ };
+
+ {
+ TaskSystem ts{kNumThreads};
+
+ // Wait some time for all threads to go to sleep.
+ std::this_thread::sleep_for(1s);
+
+ // One task per thread (assumes round-robin push to queues).
+ for (std::size_t i{}; i < ts.NumberOfThreads(); ++i) {
+ ts.QueueTask([&barrier, &ts] { barrier(&ts); });
+ }
+ }
+ CHECK(tids.size() == kNumThreads);
+ }
+}
diff --git a/test/main.cpp b/test/main.cpp
new file mode 100644
index 00000000..2335726f
--- /dev/null
+++ b/test/main.cpp
@@ -0,0 +1,8 @@
+#define CATCH_CONFIG_RUNNER
+#include "catch2/catch.hpp"
+#include "test/utils/logging/log_config.hpp"
+
+auto main(int argc, char* argv[]) -> int {
+ ConfigureLogging();
+ return Catch::Session().run(argc, argv);
+}
diff --git a/test/utils/TARGETS b/test/utils/TARGETS
new file mode 100644
index 00000000..0ee4d6cc
--- /dev/null
+++ b/test/utils/TARGETS
@@ -0,0 +1,57 @@
+{ "container_matchers":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["container_matchers"]
+ , "hdrs": ["container_matchers.hpp"]
+ , "deps": [["@", "catch2", "", "catch2"]]
+ , "stage": ["test", "utils"]
+ }
+, "execution_bazel":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["execution_bazel"]
+ , "hdrs": ["remote_execution/bazel_action_creator.hpp"]
+ , "deps":
+ [ ["@", "gsl-lite", "", "gsl-lite"]
+ , ["src/buildtool/execution_api/remote", "bazel_network"]
+ ]
+ , "stage": ["test", "utils"]
+ }
+, "log_config":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["log_config"]
+ , "hdrs": ["logging/log_config.hpp"]
+ , "deps": [["src/buildtool/logging", "logging"]]
+ , "stage": ["test", "utils"]
+ }
+, "test_env":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["test_env"]
+ , "hdrs": ["test_env.hpp"]
+ , "deps": ["log_config"]
+ , "stage": ["test", "utils"]
+ }
+, "local_hermeticity":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["local_hermeticity"]
+ , "hdrs": ["hermeticity/local.hpp"]
+ , "deps":
+ [ ["src/buildtool/common", "common"]
+ , ["src/buildtool/execution_api/local", "config"]
+ , ["src/buildtool/file_system", "file_system_manager"]
+ , ["src/buildtool/logging", "logging"]
+ ]
+ , "stage": ["test", "utils"]
+ }
+, "catch-main-remote-execution":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["catch-main-remote-execution"]
+ , "srcs": ["remote_execution/main-remote-execution.cpp"]
+ , "deps":
+ [ ["@", "catch2", "", "catch2"]
+ , ["src/buildtool/execution_api/remote", "config"]
+ , "log_config"
+ , "test_env"
+ ]
+ , "stage": ["test", "utils"]
+ }
+, "TESTS": {"type": "install", "tainted": ["test"]}
+} \ No newline at end of file
diff --git a/test/utils/container_matchers.hpp b/test/utils/container_matchers.hpp
new file mode 100644
index 00000000..c52a7d16
--- /dev/null
+++ b/test/utils/container_matchers.hpp
@@ -0,0 +1,174 @@
+#ifndef INCLUDED_SRC_TEST_UTILS_CONTAINER_MATCHERS_HPP
+#define INCLUDED_SRC_TEST_UTILS_CONTAINER_MATCHERS_HPP
+
+#include <algorithm>
+#include <sstream>
+#include <string>
+#include <type_traits>
+#include <unordered_set>
+#include <vector>
+
+#include "catch2/catch.hpp"
+
+/// \brief Matcher to check if the sets of elements present in two different
+/// containers are the same
+template <class LeftContainer, class RightContainer>
+class UniqueElementsUnorderedMatcher
+ : public Catch::MatcherBase<LeftContainer> {
+
+ RightContainer rhs_;
+
+ public:
+ // Note that in the case of an associative container with type C,
+ // C::value_type == std::pair<C::key_type const, C::mapped_type>.
+ // That would be a problem as we will be using
+ // std::unordered_set<C::value_type> So don't use this class in the case you
+ // want to compare two (multi)maps (ordered/unordered)
+ using value_type = typename LeftContainer::value_type;
+ using T = value_type;
+ static_assert(
+ std::is_constructible<T, typename RightContainer::value_type>::value,
+ "Value type of container in the left hand side must be constructible "
+ "from that of the right hand side.");
+
+ explicit UniqueElementsUnorderedMatcher(RightContainer const& rc)
+ : rhs_(rc) {}
+
+ UniqueElementsUnorderedMatcher() = delete;
+
+ // Method that produces the result to be evaluated
+ [[nodiscard]] auto match(LeftContainer const& lc) const -> bool override {
+ return IsEqualToRHS(
+ std::unordered_set<T>(std::begin(lc), std::end(lc)));
+ }
+
+ [[nodiscard]] auto describe() const -> std::string override {
+ std::ostringstream ss;
+ ss << "\nhas the same unique elements as\n{";
+ auto elem_it = std::begin(rhs_);
+ if (elem_it != std::end(rhs_)) {
+ ss << *elem_it;
+ ++elem_it;
+ for (; elem_it != std::end(rhs_); ++elem_it) {
+ ss << ", " << *elem_it;
+ }
+ }
+ ss << "}.";
+ return ss.str();
+ }
+
+ private:
+ [[nodiscard]] auto IsEqualToRHS(std::unordered_set<T> const& lhs) const
+ -> bool {
+ std::unordered_set<T> rhs(std::begin(rhs_), std::end(rhs_));
+ for (auto const& elem : lhs) {
+ auto elem_it_rhs = rhs.find(elem);
+ if (elem_it_rhs == std::end(rhs)) {
+ return false;
+ }
+ rhs.erase(elem_it_rhs);
+ }
+ return rhs.empty();
+ }
+};
+
+template <class LeftContainer, class RightContainer>
+inline auto HasSameUniqueElementsAs(RightContainer const& rc)
+ -> UniqueElementsUnorderedMatcher<LeftContainer, RightContainer> {
+ return UniqueElementsUnorderedMatcher<LeftContainer, RightContainer>(rc);
+}
+
+template <class LeftContainer, class T>
+inline auto HasSameUniqueElementsAs(std::initializer_list<T> const& rc)
+ -> UniqueElementsUnorderedMatcher<LeftContainer, std::initializer_list<T>> {
+ return UniqueElementsUnorderedMatcher<LeftContainer,
+ std::initializer_list<T>>(rc);
+}
+
+/// \brief Matcher to compare the contents of two containers up to permutation
+template <class LeftContainer>
+class ContainerUnorderedMatcher : public Catch::MatcherBase<LeftContainer> {
+ public:
+ using value_type = typename LeftContainer::value_type;
+ using T = value_type;
+
+ explicit ContainerUnorderedMatcher(std::vector<T> const& rc) : rhs_(rc) {}
+
+ ContainerUnorderedMatcher() = delete;
+
+ // Method that produces the result to be evaluated
+ [[nodiscard]] auto match(LeftContainer const& lc) const -> bool override {
+ return IsEqualToRHS(std::vector<T>(std::begin(lc), std::end(lc)));
+ }
+
+ [[nodiscard]] auto describe() const -> std::string override {
+ std::ostringstream ss;
+ ss << "\nhas the same elements as\n{";
+ auto elem_it = std::begin(rhs_);
+ if (elem_it != std::end(rhs_)) {
+ ss << *elem_it;
+ ++elem_it;
+ for (; elem_it != std::end(rhs_); ++elem_it) {
+ ss << ", " << *elem_it;
+ }
+ }
+ ss << "}.";
+ return ss.str();
+ }
+
+ private:
+ std::vector<T> rhs_;
+
+ /// \brief Compare containers by checking they have the same elements
+ /// (repetitions included). This implementation is not optimal, but it
+ /// doesn't require that the type T = LeftContainer::value_type has
+ /// known-to-STL hashing function or partial order (<)
+ [[nodiscard]] auto IsEqualToRHS(std::vector<T> const& lhs) const -> bool {
+ if (std::size(lhs) != std::size(rhs_)) {
+ return false;
+ }
+
+ // Get iterators to the rhs vector, we will remove iterators of elements
+ // found from this vector in order to account for repetitions
+ std::vector<typename std::vector<T>::const_iterator> iterators_to_check(
+ rhs_.size());
+ std::iota(std::begin(iterators_to_check),
+ std::end(iterators_to_check),
+ std::begin(rhs_));
+
+ // Instead of removing elements from the vector, as this would mean
+ // moving O(n) of them, we swap them to the back of the vector and keep
+ // track of what's the last element that has to be checked.
+ // This is similar to std::remove, but we are only interested in doing
+ // it for one element at a time.
+ auto last_to_check = std::end(iterators_to_check);
+ auto check_exists_and_remove = [&iterators_to_check,
+ &last_to_check](T const& elem) {
+ auto it_to_elem =
+ std::find_if(std::begin(iterators_to_check),
+ last_to_check,
+ [&elem](auto iter) { return *iter == elem; });
+ if (it_to_elem == last_to_check) {
+ return false;
+ }
+ --last_to_check;
+ std::iter_swap(it_to_elem, last_to_check);
+ return true;
+ };
+ for (auto const& element : lhs) {
+ if (not check_exists_and_remove(element)) {
+ return false;
+ }
+ }
+ return true;
+ }
+};
+
+template <class LeftContainer>
+inline auto HasSameElementsAs(
+ std::vector<typename LeftContainer::value_type> const& rc)
+ -> ContainerUnorderedMatcher<LeftContainer> {
+ return ContainerUnorderedMatcher<LeftContainer>(rc);
+}
+
+#endif // INCLUDED_SRC_TEST_UTILS_CONTAINER_MATCHERS_HPP
diff --git a/test/utils/hermeticity/local.hpp b/test/utils/hermeticity/local.hpp
new file mode 100644
index 00000000..5e844b50
--- /dev/null
+++ b/test/utils/hermeticity/local.hpp
@@ -0,0 +1,39 @@
+#ifndef INCLUDED_SRC_TEST_UTILS_HERMETICITY_LOCAL_HPP
+#define INCLUDED_SRC_TEST_UTILS_HERMETICITY_LOCAL_HPP
+
+#include "src/buildtool/common/statistics.hpp"
+#include "src/buildtool/execution_api/local/config.hpp"
+#include "src/buildtool/file_system/file_system_manager.hpp"
+#include "src/buildtool/logging/logger.hpp"
+
+class HermeticLocalTestFixture {
+ public:
+ HermeticLocalTestFixture() noexcept {
+ Statistics::Instance().Reset();
+ CreateAndSetCleanDiskCache(id_++);
+ }
+
+ private:
+ static inline int id_{};
+
+ static void CreateAndSetCleanDiskCache(int case_id) {
+ auto test_dir = FileSystemManager::GetCurrentDirectory();
+ auto case_dir = test_dir / "tmp" / ("case_" + std::to_string(case_id));
+
+ if (FileSystemManager::RemoveDirectory(case_dir, true) and
+ FileSystemManager::CreateDirectoryExclusive(case_dir) and
+ LocalExecutionConfig::SetBuildRoot(case_dir)) {
+ Logger::Log(LogLevel::Debug,
+ "created test-local cache dir {}",
+ case_dir.string());
+ }
+ else {
+ Logger::Log(LogLevel::Error,
+ "failed to create a test-local cache dir {}",
+ case_dir.string());
+ std::exit(EXIT_FAILURE);
+ }
+ }
+};
+
+#endif // INCLUDED_SRC_TEST_UTILS_HERMETICITY_LOCAL_HPP
diff --git a/test/utils/logging/log_config.hpp b/test/utils/logging/log_config.hpp
new file mode 100644
index 00000000..9f243d83
--- /dev/null
+++ b/test/utils/logging/log_config.hpp
@@ -0,0 +1,42 @@
+#ifndef INCLUDED_SRC_TEST_UTILS_LOGGING_LOG_CONFIG_HPP
+#define INCLUDED_SRC_TEST_UTILS_LOGGING_LOG_CONFIG_HPP
+
+#include <cstdlib>
+
+#include "src/buildtool/logging/log_config.hpp"
+#include "src/buildtool/logging/log_sink_cmdline.hpp"
+
+static auto ReadLogLevelFromEnv() -> LogLevel {
+ LogLevel const kDefaultTestLogLevel{LogLevel::Error};
+ LogLevel const kMaximumTestLogLevel{LogLevel::Trace};
+
+ auto log_level{kDefaultTestLogLevel};
+
+ auto* log_level_str = std::getenv("LOG_LEVEL_TESTS");
+ if (log_level_str not_eq nullptr) {
+ try {
+ log_level = static_cast<LogLevel>(std::stoul(log_level_str));
+ } catch (std::exception&) {
+ log_level = kDefaultTestLogLevel;
+ }
+ }
+
+ switch (log_level) {
+ case LogLevel::Error:
+ case LogLevel::Warning:
+ case LogLevel::Info:
+ case LogLevel::Debug:
+ case LogLevel::Trace:
+ return log_level;
+ }
+
+ // log level is out of range
+ return kMaximumTestLogLevel;
+}
+
+[[maybe_unused]] static inline void ConfigureLogging() {
+ LogConfig::SetLogLimit(ReadLogLevelFromEnv());
+ LogConfig::SetSinks({LogSinkCmdLine::CreateFactory(false /*no color*/)});
+}
+
+#endif // INCLUDED_SRC_TEST_UTILS_LOGGING_LOG_CONFIG_HPP
diff --git a/test/utils/remote_execution/bazel_action_creator.hpp b/test/utils/remote_execution/bazel_action_creator.hpp
new file mode 100644
index 00000000..27864408
--- /dev/null
+++ b/test/utils/remote_execution/bazel_action_creator.hpp
@@ -0,0 +1,75 @@
+#ifndef INCLUDED_SRC_TEST_UTILS_REMOTE_EXECUTION_ACTION_CREATOR_HPP
+#define INCLUDED_SRC_TEST_UTILS_REMOTE_EXECUTION_ACTION_CREATOR_HPP
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "gsl-lite/gsl-lite.hpp"
+#include "src/buildtool/crypto/hash_generator.hpp"
+#include "src/buildtool/execution_api/remote/bazel/bazel_cas_client.hpp"
+#include "src/buildtool/execution_api/remote/config.hpp"
+
+[[nodiscard]] static inline auto CreateAction(
+ std::string const& instance_name,
+ std::vector<std::string> const& args,
+ std::map<std::string, std::string> const& env_vars,
+ std::map<std::string, std::string> const& properties) noexcept
+ -> std::unique_ptr<bazel_re::Digest> {
+ auto const& info = RemoteExecutionConfig::Instance();
+
+ auto platform = std::make_unique<bazel_re::Platform>();
+ for (auto const& [name, value] : properties) {
+ bazel_re::Platform_Property property;
+ property.set_name(name);
+ property.set_value(value);
+ *(platform->add_properties()) = property;
+ }
+
+ std::vector<BazelBlob> blobs;
+
+ bazel_re::Command cmd;
+ cmd.set_allocated_platform(platform.release());
+ std::copy(
+ args.begin(), args.end(), pb::back_inserter(cmd.mutable_arguments()));
+
+ std::transform(std::begin(env_vars),
+ std::end(env_vars),
+ pb::back_inserter(cmd.mutable_environment_variables()),
+ [](auto const& name_value) {
+ bazel_re::Command_EnvironmentVariable env_var_message;
+ env_var_message.set_name(name_value.first);
+ env_var_message.set_value(name_value.second);
+ return env_var_message;
+ });
+
+ auto cmd_data = cmd.SerializeAsString();
+ auto cmd_id = ArtifactDigest::Create(cmd_data);
+ blobs.emplace_back(cmd_id, cmd_data);
+
+ bazel_re::Directory empty_dir;
+ auto dir_data = empty_dir.SerializeAsString();
+ auto dir_id = ArtifactDigest::Create(dir_data);
+ blobs.emplace_back(dir_id, dir_data);
+
+ bazel_re::Action action;
+ action.set_allocated_command_digest(
+ gsl::owner<bazel_re::Digest*>{new bazel_re::Digest{cmd_id}});
+ action.set_do_not_cache(false);
+ action.set_allocated_input_root_digest(
+ gsl::owner<bazel_re::Digest*>{new bazel_re::Digest{dir_id}});
+
+ auto action_data = action.SerializeAsString();
+ auto action_id = ArtifactDigest::Create(action_data);
+ blobs.emplace_back(action_id, action_data);
+
+ BazelCasClient cas_client(info.Host(), info.Port());
+
+ if (cas_client.BatchUpdateBlobs(instance_name, blobs.begin(), blobs.end())
+ .size() == blobs.size()) {
+ return std::make_unique<bazel_re::Digest>(action_id);
+ }
+ return nullptr;
+}
+
+#endif // INCLUDED_SRC_TEST_UTILS_REMOTE_EXECUTION_ACTION_CREATOR_HPP
diff --git a/test/utils/remote_execution/main-remote-execution.cpp b/test/utils/remote_execution/main-remote-execution.cpp
new file mode 100755
index 00000000..3f435dcc
--- /dev/null
+++ b/test/utils/remote_execution/main-remote-execution.cpp
@@ -0,0 +1,50 @@
+#define CATCH_CONFIG_RUNNER
+#include <chrono>
+#include <cstdlib>
+#include <iostream>
+#include <sstream>
+#include <thread>
+
+#include "catch2/catch.hpp"
+#include "src/buildtool/execution_api/remote/config.hpp"
+#include "test/utils/logging/log_config.hpp"
+#include "test/utils/test_env.hpp"
+
+namespace {
+
+void wait_for_grpc_to_shutdown() {
+ // grpc_shutdown_blocking(); // not working
+ std::this_thread::sleep_for(std::chrono::seconds(1));
+}
+
+/// \brief Configure remote execution from test environment. In case the
+/// environment variable is malformed, we write a message and stop execution.
+/// \returns true If remote execution was successfully configured.
+[[nodiscard]] auto ConfigureRemoteExecution() -> bool {
+ auto address = ReadRemoteAddressFromEnv();
+ auto& config = RemoteExecutionConfig::Instance();
+ if (address and not config.SetAddress(*address)) {
+ Logger::Log(LogLevel::Error, "parsing address '{}' failed.", *address);
+ std::exit(EXIT_FAILURE);
+ }
+ return config.IsValidAddress();
+}
+
+} // namespace
+
+auto main(int argc, char* argv[]) -> int {
+ ConfigureLogging();
+
+ // In case remote execution address is not valid, we skip tests. This is in
+ // order to avoid tests being dependent on the environment.
+ if (not ConfigureRemoteExecution()) {
+ return EXIT_SUCCESS;
+ }
+
+ int result = Catch::Session().run(argc, argv);
+
+ // valgrind fails if we terminate before grpc's async shutdown threads exit
+ wait_for_grpc_to_shutdown();
+
+ return result;
+}
diff --git a/test/utils/test_env.hpp b/test/utils/test_env.hpp
new file mode 100644
index 00000000..3bc49901
--- /dev/null
+++ b/test/utils/test_env.hpp
@@ -0,0 +1,44 @@
+#ifndef INCLUDED_SRC_TEST_UTILS_TEST_ENV_HPP
+#define INCLUDED_SRC_TEST_UTILS_TEST_ENV_HPP
+
+#include <cstdlib>
+#include <map>
+#include <optional>
+#include <sstream>
+#include <string>
+
+#include "test/utils/logging/log_config.hpp"
+
+[[nodiscard]] static inline auto ReadPlatformPropertiesFromEnv()
+ -> std::map<std::string, std::string> {
+ std::map<std::string, std::string> properties{};
+ auto* execution_props = std::getenv("REMOTE_EXECUTION_PROPERTIES");
+ if (execution_props not_eq nullptr) {
+ std::istringstream pss(std::string{execution_props});
+ std::string keyval_pair;
+ while (std::getline(pss, keyval_pair, ';')) {
+ std::istringstream kvss{keyval_pair};
+ std::string key;
+ std::string val;
+ if (not std::getline(kvss, key, ':') or
+ not std::getline(kvss, val, ':')) {
+ Logger::Log(LogLevel::Error,
+ "parsing property '{}' failed.",
+ keyval_pair);
+ std::exit(EXIT_FAILURE);
+ }
+ properties.emplace(std::move(key), std::move(val));
+ }
+ }
+ return properties;
+}
+
+[[nodiscard]] static inline auto ReadRemoteAddressFromEnv()
+ -> std::optional<std::string> {
+ auto* execution_address = std::getenv("REMOTE_EXECUTION_ADDRESS");
+ return execution_address == nullptr
+ ? std::nullopt
+ : std::make_optional(std::string{execution_address});
+}
+
+#endif // INCLUDED_SRC_TEST_UTILS_TEST_ENV_HPP
diff --git a/test/utils/typed_testfixtures.py b/test/utils/typed_testfixtures.py
new file mode 100644
index 00000000..e601cf3c
--- /dev/null
+++ b/test/utils/typed_testfixtures.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python3
+
+import typing
+import testfixtures
+
+
+def compare(*args: typing.Any, **kw: typing.Any) -> None:
+ testfixtures.compare(*args, **kw) # type: ignore
+
+
+class ShouldRaise(testfixtures.ShouldRaise): # type: ignore
+ pass \ No newline at end of file