summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorSascha Roloff <sascha.roloff@huawei.com>2022-10-24 16:07:35 +0200
committerSascha Roloff <sascha.roloff@huawei.com>2022-11-02 14:53:05 +0100
commitde7308ef8128751211b102884fa37be4cc814d2d (patch)
treedd2671eb1a7a59a5f7f4880c747f04538d356ff0
parent9bb4ace1685793d4e58497161672de594a368785 (diff)
downloadjustbuild-de7308ef8128751211b102884fa37be4cc814d2d.tar.gz
Improved test case for target-level cached artifacts synchronization
This commit extends the test case for target-level cached artifacts synchronization to include all locations of a target-cache entry where known artifacts can be mentioned. These locations are the 'artifacts', 'runfiles', and 'provides' map and all are considered now as part of the test.
-rw-r--r--test/end-to-end/target-cache/TARGETS9
-rw-r--r--test/end-to-end/target-cache/artifacts-sync.sh290
-rw-r--r--test/end-to-end/target-cache/test-data-artifacts-sync/TARGETS4
-rw-r--r--test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/ROOT0
-rw-r--r--test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/TARGETS6
-rw-r--r--test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/include/TARGETS6
-rw-r--r--test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/include/greet.hpp17
-rw-r--r--test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/src/TARGETS7
-rw-r--r--test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/src/greet.cpp21
-rw-r--r--test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/main/TARGETS7
-rw-r--r--test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/main/main.cpp19
-rw-r--r--test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/repos.json16
-rw-r--r--test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/ROOT0
-rw-r--r--test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/RULES316
-rw-r--r--test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/TARGETS5
-rw-r--r--test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/bar.py15
-rwxr-xr-xtest/end-to-end/target-cache/test-data-artifacts-sync/pydicts/dict_converter.py39
-rw-r--r--test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/foo.py15
-rw-r--r--test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/repos.json5
19 files changed, 710 insertions, 87 deletions
diff --git a/test/end-to-end/target-cache/TARGETS b/test/end-to-end/target-cache/TARGETS
index ed8d75ff..853335a9 100644
--- a/test/end-to-end/target-cache/TARGETS
+++ b/test/end-to-end/target-cache/TARGETS
@@ -8,8 +8,15 @@
{ "type": ["@", "rules", "shell/test", "script"]
, "name": ["artifacts-sync"]
, "test": ["artifacts-sync.sh"]
- , "deps": [["test/end-to-end", "tool-under-test"], ["", "bin/just-mr.py"]]
+ , "deps":
+ [ ["test/end-to-end", "tool-under-test"]
+ , ["./", "test-data-artifacts-sync", "greetlib"]
+ , ["./", "test-data-artifacts-sync", "pydicts"]
+ , "bootstrap-src-staged"
+ ]
}
+, "bootstrap-src-staged":
+ {"type": "install", "dirs": [[["", "bootstrap-src"], "foo"]]}
, "TESTS":
{ "type": "install"
, "tainted": ["test"]
diff --git a/test/end-to-end/target-cache/artifacts-sync.sh b/test/end-to-end/target-cache/artifacts-sync.sh
index 64003964..c4f75f51 100644
--- a/test/end-to-end/target-cache/artifacts-sync.sh
+++ b/test/end-to-end/target-cache/artifacts-sync.sh
@@ -13,30 +13,48 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-
set -eu
-# This test requires remote execution and is skipped in case of local execution.
-# The test is separated into two phases. In phase I, the execution IDs for the
-# local and the remote execution backends are determined. In phase II, the
-# actual test is executed, which is explained in the next section.
-
-# In order to test the synchronization of target-level cached remote artifacts
-# to the local CAS and back, the following test has been designed. The test
-# needs to verify that if any artifact that is mentioned in a target cache entry
-# is garbage collected on the remote side is actually uploaded by the
-# synchronization mechanism and will be available for remote action execution.
-# In order to "emulate" the behavior of a garbage collected file or tree on the
+# This test checks the synchronization mechanism of target-level-cached remote
+# artifacts to the local CAS and back. It requires remote execution and is
+# skipped in case of local execution. The test is separated into three phases.
+# In phase I, the execution IDs for the local and the remote execution backends
+# are determined. In phase II, artifacts mentioned in the 'artifacts' and the
+# 'runfiles' map of a target-cache entry are tested. In phase III, artifacts
+# mentioned in the 'provides' map of a target-cache entry are tested.
+
+# In order to test the synchronization of target-level-cached remote artifacts
+# to the local CAS and back, the following testing technique has been designed.
+# The test needs to verify that any artifact that is mentioned in a target-cache
+# entry and is garbage collected on the remote side is uploaded again by the
+# synchronization mechanism, thus making it available for remote action
+# execution. A complete verification needs to test both the download (backup)
+# and the upload direction of the synchronization mechanism. Thus, phases II
+# and III are each split into parts A and B to test the download and upload
+# direction, respectively. To test the download direction, a remote execution is triggered
+# and artifacts mentioned in the target-cache entry should be available in the
+# local CAS. To test the upload direction, we need a garbage collection of some
+# backed-up files, which have been downloaded during a remote execution. In
+# order to "emulate" the behavior of a garbage-collected file or tree on the
# remote side, a simple project with parts eligible for target-level caching is
-# built locally and the target cache entry is pretended to be created for the
+# built locally and the target-cache entry is pretended to be created for the
# remote execution backend. If then remote execution is triggered, the files or
# trees are not available on the remote side, so they need to be uploaded from
-# the local CAS. After the artifacts have been uploaded to the remote CAS, they
-# would be available for the next test run, thus, random strings are used in the
-# affected text files to vary the according hash value.
-
-readonly JUST="$PWD/bin/tool-under-test"
-readonly JUST_MR="$PWD/bin/just-mr.py"
+# the local CAS. Since there are three locations within a target-cache entry
+# where artifacts can be mentioned, all three locations need to be tested,
+# 'artifacts' and 'runfiles' map in test phase II and 'provides' map in test
+# phase III.
+
+# Since this test works with KNOWN artifacts, once an artifact is uploaded to
+# the remote side as part of a test execution, it is known with that hash and
+# would not cause an error in the next test execution. Thus, we inject random
+# strings in the affected source files to create artifacts not yet known to the
+# remote side.
+
+readonly ROOT="$PWD"
+readonly JUST="$ROOT/bin/tool-under-test"
+readonly JUST_MR="$ROOT/foo/bin/just-mr.py"
+readonly JUST_RULES="$ROOT/foo/rules"
readonly LBRDIR="$TEST_TMPDIR/local-build-root"
readonly TESTDIR="$TEST_TMPDIR/test-root"
@@ -44,26 +62,44 @@ if [ "${REMOTE_EXECUTION_ADDRESS:-}" = "" ]; then
echo
echo "Test skipped, since no remote execution is specified."
echo
+ return
+fi
+
+REMOTE_EXECUTION_ARGS="-r $REMOTE_EXECUTION_ADDRESS"
+if [ "${REMOTE_EXECUTION_PROPERTIES:-}" != "" ]; then
+ REMOTE_EXECUTION_ARGS="$REMOTE_EXECUTION_ARGS --remote-execution-property $REMOTE_EXECUTION_PROPERTIES"
+fi
+
+if [ "${COMPATIBLE:-}" = "YES" ]; then
+ ARGS="--compatible"
+ HASH_TYPE="compatible-sha256"
else
- REMOTE_EXECUTION_ARGS="-r $REMOTE_EXECUTION_ADDRESS"
-
- if [ "${REMOTE_EXECUTION_PROPERTIES:-}" != "" ]; then
- REMOTE_EXECUTION_ARGS="$REMOTE_EXECUTION_ARGS --remote-execution-property $REMOTE_EXECUTION_PROPERTIES"
- fi
-
- if [ "${COMPATIBLE:-}" = "YES" ]; then
- ARGS="--compatible"
- TCDIR="$LBRDIR/protocol-dependent/generation-0/compatible-sha256/tc"
- else
- ARGS=""
- TCDIR="$LBRDIR/protocol-dependent/generation-0/git-sha1/tc"
- fi
-
- # create common test files
- mkdir -p "$TESTDIR"
- cd "$TESTDIR"
- touch ROOT
- cat > repos.json <<EOF
+ ARGS=""
+ HASH_TYPE="git-sha1"
+fi
+TCDIR="$LBRDIR/protocol-dependent/generation-0/$HASH_TYPE/tc"
+
+# Print the CASF hash of the first target cache entry found for a given backend
+# (parameter $1)
+get_tc_hash() {
+ TC_HASH0=$(ls -1 "$TCDIR/$1" | head -n1)
+ TC_HASH1=$(ls -1 "$TCDIR/$1/$TC_HASH0" | head -n1)
+ cat "$TCDIR/$1/$TC_HASH0/$TC_HASH1" | tr -d '[]' | cut -d: -f1
+}
+
+# ------------------------------------------------------------------------------
+# Test Phase I: Determine local and remote execution ID
+# ------------------------------------------------------------------------------
+
+echo
+echo "Test phase I"
+echo
+
+# Create test files
+mkdir -p "$TESTDIR"
+cd "$TESTDIR"
+touch ROOT
+cat > repos.json <<EOF
{ "repositories":
{ "main":
{"repository": {"type": "file", "path": ".", "pragma": {"to_git": true}}}
@@ -71,63 +107,145 @@ else
}
EOF
- # ----------------------------------------------------------------------------
- # Test Phase I
- # ----------------------------------------------------------------------------
-
- cat > TARGETS.p1 <<EOF
+cat > TARGETS <<EOF
{ "main": {"type": "export", "target": ["./", "main-target"]}
, "main-target":
{"type": "generic", "cmds": ["echo foo > foo.txt"], "outs": ["foo.txt"]}
}
EOF
- export CONF="$("$JUST_MR" -C repos.json --local-build-root="$LBRDIR" setup main)"
-
- # determine local execution ID
- "$JUST" build -C "$CONF" main --local-build-root="$LBRDIR" --target-file-name TARGETS.p1 $ARGS > /dev/null 2>&1
- readonly LOCAL_EXECUTION_ID=$(ls "$TCDIR")
- rm -rf "$TCDIR"
-
- # determine remote execution ID
- "$JUST" build -C "$CONF" main --local-build-root="$LBRDIR" --target-file-name TARGETS.p1 $ARGS $REMOTE_EXECUTION_ARGS > /dev/null 2>&1
- readonly REMOTE_EXECUTION_ID=$(ls "$TCDIR")
- rm -rf "$TCDIR"
+# Determine local execution ID
+"$JUST_MR" --norc --just "$JUST" --local-build-root "$LBRDIR" build main $ARGS
+readonly LOCAL_EXECUTION_ID=$(ls -1 "$TCDIR" | head -n1)
+echo "Local execution ID: $LOCAL_EXECUTION_ID"
+rm -rf "$TCDIR"
+
+# Determine remote execution ID
+"$JUST_MR" --norc --just "$JUST" --local-build-root "$LBRDIR" build main $ARGS $REMOTE_EXECUTION_ARGS
+readonly REMOTE_EXECUTION_ID=$(ls -1 "$TCDIR" | head -n1)
+echo "Remote execution ID: $REMOTE_EXECUTION_ID"
+rm -rf "$TCDIR"
+
+# Clean up test files
+rm -rf "$TESTDIR" "$LBRDIR"
+cd "$ROOT"
+
+# ------------------------------------------------------------------------------
+# Test Phase II: Test artifacts sync of 'artifacts' and 'runfiles' map
+# ------------------------------------------------------------------------------
+
+echo
+echo "Test phase II"
+echo
+
+# Copy greetlib test files
+cp -r "$ROOT/greetlib" "$TESTDIR"
+cd "$TESTDIR"
+
+# Inject rules path into repos.json
+sed -i "s|<RULES_PATH>|$JUST_RULES|" repos.json
+
+# A) TEST DOWNLOAD
+# ----------------
+echo "Check artifacts download"
+
+# Inject random string into source files
+RANDOM_STRING=$(hostname).$(date +%s%N).$$
+sed -i "s|RANDOM_STRING_1 \".*\"|RANDOM_STRING_1 \"$RANDOM_STRING\"|" greet/include/greet.hpp
+sed -i "s|RANDOM_STRING_2 \".*\"|RANDOM_STRING_2 \"$RANDOM_STRING\"|" greet/src/greet.cpp
+
+# Build greetlib remotely
+"$JUST_MR" --norc --just "$JUST" --local-build-root "$LBRDIR" --main main build main $ARGS $REMOTE_EXECUTION_ARGS
+
+# Check if file and tree artifacts have been downloaded correctly
+readonly TC_HASH=$(get_tc_hash $REMOTE_EXECUTION_ID)
+readonly TC_ENTRY=$("$JUST" install-cas --local-build-root "$LBRDIR" $ARGS ${TC_HASH})
+readonly FILE_HASH=$(echo $TC_ENTRY | jq -r '.artifacts."libgreet.a".data.id')
+readonly TREE_HASH=$(echo $TC_ENTRY | jq -r '.runfiles.greet.data.id')
+"$JUST" install-cas --local-build-root "$LBRDIR" $ARGS ${FILE_HASH} > /dev/null
+"$JUST" install-cas --local-build-root "$LBRDIR" $ARGS ${TREE_HASH} > /dev/null
+
+# B) TEST UPLOAD
+# --------------
+echo "Check artifacts upload"
+
+# Inject random string into source files
+RANDOM_STRING=$(hostname).$(date +%s%N).$$
+sed -i "s|RANDOM_STRING_1 \".*\"|RANDOM_STRING_1 \"$RANDOM_STRING\"|" greet/include/greet.hpp
+sed -i "s|RANDOM_STRING_2 \".*\"|RANDOM_STRING_2 \"$RANDOM_STRING\"|" greet/src/greet.cpp
+
+# Build greetlib locally
+"$JUST_MR" --norc --just "$JUST" --local-build-root "$LBRDIR" --main main build main $ARGS
+
+# Modify target cache origin
+mv "$TCDIR/$LOCAL_EXECUTION_ID" "$TCDIR/$REMOTE_EXECUTION_ID"
+
+# Check if greetlib successfully builds remotely
+"$JUST_MR" --norc --just "$JUST" --local-build-root "$LBRDIR" --main main build main $ARGS $REMOTE_EXECUTION_ARGS
+
+# Clean up test files
+rm -rf "$TESTDIR" "$LBRDIR"
+cd "$ROOT"
+
+# ------------------------------------------------------------------------------
+# Test Phase III: Test artifacts sync of 'provides' map
+# ------------------------------------------------------------------------------
+
+echo
+echo "Test phase III"
+echo
+
+# Copy pydicts test files
+cp -r "$ROOT/pydicts" "$TESTDIR"
+cd "$TESTDIR"
+
+# A) TEST DOWNLOAD
+# ----------------
+echo "Check artifacts download"
+
+# Inject random string into source files
+RANDOM_STRING=$(hostname).$(date +%s%N).$$
+sed -i "s|\"foo\": \"[^\"]*\"|\"foo\": \"$RANDOM_STRING\"|" foo.py
+sed -i "s|\"foo\": \"[^\"]*\"|\"foo\": \"$RANDOM_STRING\"|" bar.py
+
+# Build pydicts remotely
+"$JUST_MR" --norc --just "$JUST" --local-build-root "$LBRDIR" build json_from_py $ARGS $REMOTE_EXECUTION_ARGS
+
+# 'exported_py' target contains a provides map,
+# which contains an abstract node (type 'convert'),
+# which contains value nodes,
+# which contain target results,
+# which contain KNOWN artifacts {foo,bar}.py
+
+# Check if {foo,bar}.py have been downloaded correctly
+if [ "${COMPATIBLE:-}" = "YES" ]; then
+ readonly FOO_HASH=$(cat foo.py | sha256sum | cut -d' ' -f1)
+ readonly BAR_HASH=$(cat bar.py | sha256sum | cut -d' ' -f1)
+else
+ readonly FOO_HASH=$(cat foo.py | git hash-object --stdin)
+ readonly BAR_HASH=$(cat bar.py | git hash-object --stdin)
+fi
+"$JUST" install-cas --local-build-root "$LBRDIR" $ARGS ${FOO_HASH} > /dev/null
+"$JUST" install-cas --local-build-root "$LBRDIR" $ARGS ${BAR_HASH} > /dev/null
- # ----------------------------------------------------------------------------
- # Test Phase II
- # ----------------------------------------------------------------------------
+# B) TEST UPLOAD
+# --------------
+echo "Check artifacts upload"
- # create random string: p<hostname>p<time[ns]>p<pid>p<random number[9 digits]>
- readonly LOW=100000000
- readonly HIGH=999999999
- readonly RND="p$(hostname)p$(date +%s%N)p$$p$(shuf -i $LOW-$HIGH -n 1)"
+# Inject random string into source files
+RANDOM_STRING=$(hostname).$(date +%s%N).$$
+sed -i "s|\"foo\": \"[^\"]*\"|\"foo\": \"$RANDOM_STRING\"|" foo.py
+sed -i "s|\"foo\": \"[^\"]*\"|\"foo\": \"$RANDOM_STRING\"|" bar.py
- cat > TARGETS.p2 <<EOF
-{ "main": {"type": "export", "target": ["./", "main-target"]}
-, "main-target":
- { "type": "generic"
- , "cmds": ["echo $RND | tee foo.txt out/bar.txt"]
- , "outs": ["foo.txt"]
- , "out_dirs": ["out"]
- }
-}
-EOF
+# Build pydicts locally
+"$JUST_MR" --norc --just "$JUST" --local-build-root "$LBRDIR" build json_from_py $ARGS
- export CONF="$("$JUST_MR" -C repos.json --local-build-root="$LBRDIR" setup main)"
+# Modify target cache origin
+mv "$TCDIR/$LOCAL_EXECUTION_ID" "$TCDIR/$REMOTE_EXECUTION_ID"
- # build project locally
- echo
- echo "Build project locally"
- echo
- "$JUST" build -C "$CONF" main --local-build-root="$LBRDIR" --target-file-name TARGETS.p2 $ARGS 2>&1
-
- # pretend target cache entry being created for remote execution backend
- mv "$TCDIR/$LOCAL_EXECUTION_ID" "$TCDIR/$REMOTE_EXECUTION_ID"
+# Check if pydicts successfully builds remotely
+"$JUST_MR" --norc --just "$JUST" --local-build-root "$LBRDIR" build json_from_py $ARGS $REMOTE_EXECUTION_ARGS
- # build project remotely
- echo
- echo "Build project remotely"
- echo
- "$JUST" build -C "$CONF" main --local-build-root="$LBRDIR" --target-file-name TARGETS.p2 $ARGS $REMOTE_EXECUTION_ARGS 2>&1
-fi
+# Clean up test files
+rm -rf "$TESTDIR" "$LBRDIR"
+cd "$ROOT"
diff --git a/test/end-to-end/target-cache/test-data-artifacts-sync/TARGETS b/test/end-to-end/target-cache/test-data-artifacts-sync/TARGETS
new file mode 100644
index 00000000..d04fc3aa
--- /dev/null
+++ b/test/end-to-end/target-cache/test-data-artifacts-sync/TARGETS
@@ -0,0 +1,4 @@
+{ "greetlib":
+ {"type": "install", "dirs": [[["TREE", null, "./greetlib"], "."]]}
+, "pydicts": {"type": "install", "dirs": [[["TREE", null, "./pydicts"], "."]]}
+}
diff --git a/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/ROOT b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/ROOT
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/ROOT
diff --git a/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/TARGETS b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/TARGETS
new file mode 100644
index 00000000..53656455
--- /dev/null
+++ b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/TARGETS
@@ -0,0 +1,6 @@
+{ "greet":
+ { "type": "export"
+ , "target": ["src", "greetlib"]
+ , "flexible_config": ["CXX", "CXXFLAGS", "AR", "ENV"]
+ }
+}
diff --git a/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/include/TARGETS b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/include/TARGETS
new file mode 100644
index 00000000..6093f6f4
--- /dev/null
+++ b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/include/TARGETS
@@ -0,0 +1,6 @@
+{ "hdrs":
+ { "type": ["@", "rules", "CC", "header directory"]
+ , "hdrs": [["TREE", null, "."]]
+ , "stage": ["greet"]
+ }
+}
diff --git a/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/include/greet.hpp b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/include/greet.hpp
new file mode 100644
index 00000000..b4b866d7
--- /dev/null
+++ b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/include/greet.hpp
@@ -0,0 +1,17 @@
+// Copyright 2022 Huawei Cloud Computing Technology Co., Ltd.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <string>
+#define RANDOM_STRING_1 ""
+void greet(std::string const& str);
diff --git a/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/src/TARGETS b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/src/TARGETS
new file mode 100644
index 00000000..0b7a344f
--- /dev/null
+++ b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/src/TARGETS
@@ -0,0 +1,7 @@
+{ "greetlib":
+ { "type": ["@", "rules", "CC", "library"]
+ , "name": ["greet"]
+ , "hdrs": [["include", "hdrs"]]
+ , "srcs": ["greet.cpp"]
+ }
+}
diff --git a/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/src/greet.cpp b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/src/greet.cpp
new file mode 100644
index 00000000..0d898d24
--- /dev/null
+++ b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/greet/src/greet.cpp
@@ -0,0 +1,21 @@
+// Copyright 2022 Huawei Cloud Computing Technology Co., Ltd.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "greet/greet.hpp"
+
+#include <iostream>
+#define RANDOM_STRING_2 ""
+void greet(std::string const& str) {
+ std::cout << RANDOM_STRING_1 RANDOM_STRING_2 ": " << str << std::endl;
+}
diff --git a/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/main/TARGETS b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/main/TARGETS
new file mode 100644
index 00000000..a3924f4d
--- /dev/null
+++ b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/main/TARGETS
@@ -0,0 +1,7 @@
+{ "main":
+ { "type": ["@", "rules", "CC", "binary"]
+ , "name": ["main"]
+ , "srcs": ["main.cpp"]
+ , "deps": [["@", "greet", "", "greet"]]
+ }
+}
diff --git a/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/main/main.cpp b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/main/main.cpp
new file mode 100644
index 00000000..63e136c7
--- /dev/null
+++ b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/main/main.cpp
@@ -0,0 +1,19 @@
+// Copyright 2022 Huawei Cloud Computing Technology Co., Ltd.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "greet/greet.hpp"
+int main() {
+ greet("Hello, World!");
+ return 0;
+}
diff --git a/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/repos.json b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/repos.json
new file mode 100644
index 00000000..24bebd2d
--- /dev/null
+++ b/test/end-to-end/target-cache/test-data-artifacts-sync/greetlib/repos.json
@@ -0,0 +1,16 @@
+{ "repositories":
+ { "main":
+ { "repository": {"type": "file", "path": "./main"}
+ , "bindings": {"rules": "rules", "greet": "greet"}
+ }
+ , "greet":
+ { "repository":
+ {"type": "file", "path": "./greet", "pragma": {"to_git": true}}
+ , "bindings": {"rules": "rules"}
+ }
+ , "rules":
+ { "repository":
+ {"type": "file", "path": "<RULES_PATH>", "pragma": {"to_git": true}}
+ }
+ }
+}
diff --git a/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/ROOT b/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/ROOT
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/ROOT
diff --git a/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/RULES b/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/RULES
new file mode 100644
index 00000000..c9a1dbda
--- /dev/null
+++ b/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/RULES
@@ -0,0 +1,316 @@
+{ "py_dicts":
+ { "target_fields": ["py_files"]
+ , "implicit": {"converter": [["FILE", null, "dict_converter.py"]]}
+ , "expression":
+ { "type": "RESULT"
+ , "artifacts": {"type": "empty_map"}
+ , "runfiles": {"type": "empty_map"}
+ , "provides":
+ { "type": "let*"
+ , "bindings":
+ [ [ "converter"
+ , { "type": "VALUE_NODE"
+ , "$1":
+ { "type": "RESULT"
+ , "artifacts":
+ { "type": "map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "d"
+ , "range": {"type": "FIELD", "name": "converter"}
+ , "body":
+ { "type": "DEP_ARTIFACTS"
+ , "dep": {"type": "var", "name": "d"}
+ }
+ }
+ }
+ , "runfiles": {"type": "empty_map"}
+ , "provides": {"type": "empty_map"}
+ }
+ }
+ ]
+ , [ "py_files"
+ , { "type": "VALUE_NODE"
+ , "$1":
+ { "type": "RESULT"
+ , "artifacts":
+ { "type": "map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "d"
+ , "range": {"type": "FIELD", "name": "py_files"}
+ , "body":
+ { "type": "DEP_ARTIFACTS"
+ , "dep": {"type": "var", "name": "d"}
+ }
+ }
+ }
+ , "runfiles": {"type": "empty_map"}
+ , "provides": {"type": "empty_map"}
+ }
+ }
+ ]
+ ]
+ , "body":
+ { "type": "map_union"
+ , "$1":
+ [ { "type": "singleton_map"
+ , "key": "py2json"
+ , "value":
+ [ { "type": "ABSTRACT_NODE"
+ , "string_fields":
+ { "type": "map_union"
+ , "$1":
+ [ {"type": "singleton_map", "key": "from", "value": ["py"]}
+ , {"type": "singleton_map", "key": "to", "value": ["json"]}
+ ]
+ }
+ , "target_fields":
+ { "type": "map_union"
+ , "$1":
+ [ { "type": "singleton_map"
+ , "key": "converter"
+ , "value": [{"type": "var", "name": "converter"}]
+ }
+ , { "type": "singleton_map"
+ , "key": "dicts"
+ , "value": [{"type": "var", "name": "py_files"}]
+ }
+ ]
+ }
+ , "node_type": "convert"
+ }
+ ]
+ }
+ ]
+ }
+ }
+ }
+ }
+, "dicts_convert":
+ { "string_fields": ["from", "to"]
+ , "target_fields": ["converter", "dicts"]
+ , "config_vars": ["ext"]
+ , "expression":
+ { "type": "let*"
+ , "bindings":
+ [ ["from", {"type": "join", "$1": {"type": "FIELD", "name": "from"}}]
+ , ["to", {"type": "join", "$1": {"type": "FIELD", "name": "to"}}]
+ , [ "ext"
+ , { "type": "var"
+ , "name": "ext"
+ , "default":
+ { "type": "if"
+ , "cond":
+ {"type": "==", "$1": {"type": "var", "name": "to"}, "$2": "py"}
+ , "then": ".py"
+ , "else":
+ { "type": "if"
+ , "cond":
+ { "type": "=="
+ , "$1": {"type": "var", "name": "to"}
+ , "$2": "json"
+ }
+ , "then": ".json"
+ , "else": ".yaml"
+ }
+ }
+ }
+ ]
+ , [ "dicts"
+ , { "type": "disjoint_map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "d"
+ , "range": {"type": "FIELD", "name": "dicts"}
+ , "body":
+ {"type": "DEP_ARTIFACTS", "dep": {"type": "var", "name": "d"}}
+ }
+ }
+ ]
+ , [ "converter"
+ , { "type": "to_subdir"
+ , "subdir": "bin"
+ , "flat": false
+ , "$1":
+ { "type": "disjoint_map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "x"
+ , "range": {"type": "FIELD", "name": "converter"}
+ , "body":
+ {"type": "DEP_ARTIFACTS", "dep": {"type": "var", "name": "x"}}
+ }
+ }
+ }
+ ]
+ , [ "converter_bin"
+ , { "type": "join"
+ , "$1": {"type": "keys", "$1": {"type": "var", "name": "converter"}}
+ }
+ ]
+ ]
+ , "body":
+ { "type": "RESULT"
+ , "artifacts":
+ { "type": "disjoint_map_union"
+ , "$1":
+ { "type": "foreach_map"
+ , "var_key": "path"
+ , "var_val": "file"
+ , "range": {"type": "var", "name": "dicts"}
+ , "body":
+ { "type": "let*"
+ , "bindings":
+ [ [ "out"
+ , { "type": "change_ending"
+ , "ending": {"type": "var", "name": "ext"}
+ , "$1": {"type": "var", "name": "path"}
+ }
+ ]
+ ]
+ , "body":
+ { "type": "ACTION"
+ , "cmd":
+ [ "/bin/sh"
+ , "-c"
+ , { "type": "join"
+ , "$1":
+ { "type": "++"
+ , "$1":
+ [ [{"type": "var", "name": "converter_bin"}]
+ , [ {"type": "var", "name": "from"}
+ , {"type": "var", "name": "to"}
+ ]
+ , [ {"type": "var", "name": "path"}
+ , ">"
+ , {"type": "var", "name": "out"}
+ ]
+ ]
+ }
+ , "separator": " "
+ }
+ ]
+ , "inputs":
+ { "type": "disjoint_map_union"
+ , "$1":
+ [ {"type": "var", "name": "converter"}
+ , { "type": "singleton_map"
+ , "key": {"type": "var", "name": "path"}
+ , "value": {"type": "var", "name": "file"}
+ }
+ ]
+ }
+ , "outs": [{"type": "var", "name": "out"}]
+ }
+ }
+ }
+ }
+ , "runfiles": {"type": "empty_map"}
+ , "provides": {"type": "empty_map"}
+ }
+ }
+ }
+, "json_dicts":
+ { "target_fields": ["py_dicts"]
+ , "implicit": {"converter": [["FILE", null, "dict_converter.py"]]}
+ , "anonymous":
+ { "from_py":
+ { "provider": "py2json"
+ , "rule_map": {"convert": "dicts_convert"}
+ , "target": "py_dicts"
+ }
+ }
+ , "expression":
+ { "type": "RESULT"
+ , "artifacts":
+ { "type": "disjoint_map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "a"
+ , "range": {"type": "FIELD", "name": "from_py"}
+ , "body":
+ {"type": "DEP_ARTIFACTS", "dep": {"type": "var", "name": "a"}}
+ }
+ }
+ , "runfiles": {"type": "empty_map"}
+ , "provides":
+ { "type": "let*"
+ , "bindings":
+ [ [ "converter"
+ , { "type": "VALUE_NODE"
+ , "$1":
+ { "type": "RESULT"
+ , "artifacts":
+ { "type": "map_union"
+ , "$1":
+ { "type": "foreach"
+ , "var": "d"
+ , "range": {"type": "FIELD", "name": "converter"}
+ , "body":
+ { "type": "DEP_ARTIFACTS"
+ , "dep": {"type": "var", "name": "d"}
+ }
+ }
+ }
+ , "runfiles": {"type": "empty_map"}
+ , "provides": {"type": "empty_map"}
+ }
+ }
+ ]
+ , [ "py_nodes"
+ , { "type": "++"
+ , "$1":
+ { "type": "foreach"
+ , "var": "d"
+ , "range": {"type": "FIELD", "name": "py_dicts"}
+ , "body":
+ { "type": "DEP_PROVIDES"
+ , "dep": {"type": "var", "name": "d"}
+ , "provider": "py2json"
+ , "default": {"type": "empty_map"}
+ }
+ }
+ }
+ ]
+ ]
+ , "body":
+ { "type": "map_union"
+ , "$1":
+ [ { "type": "singleton_map"
+ , "key": "json2yaml"
+ , "value":
+ [ { "type": "ABSTRACT_NODE"
+ , "string_fields":
+ { "type": "map_union"
+ , "$1":
+ [ { "type": "singleton_map"
+ , "key": "from"
+ , "value": ["json"]
+ }
+ , {"type": "singleton_map", "key": "to", "value": ["yaml"]}
+ ]
+ }
+ , "target_fields":
+ { "type": "map_union"
+ , "$1":
+ [ { "type": "singleton_map"
+ , "key": "converter"
+ , "value": [{"type": "var", "name": "converter"}]
+ }
+ , { "type": "singleton_map"
+ , "key": "dicts"
+ , "value": {"type": "var", "name": "py_nodes"}
+ }
+ ]
+ }
+ , "node_type": "convert"
+ }
+ ]
+ }
+ ]
+ }
+ }
+ }
+ }
+}
diff --git a/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/TARGETS b/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/TARGETS
new file mode 100644
index 00000000..c4af989c
--- /dev/null
+++ b/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/TARGETS
@@ -0,0 +1,5 @@
+{ "json_from_py": {"type": [".", "json_dicts"], "py_dicts": ["exported_py"]}
+, "exported_py": {"type": "export", "target": "py_dict_files"}
+, "py_dict_files":
+ {"type": [".", "py_dicts"], "py_files": ["foo.py", "bar.py"]}
+}
diff --git a/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/bar.py b/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/bar.py
new file mode 100644
index 00000000..251cbab3
--- /dev/null
+++ b/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/bar.py
@@ -0,0 +1,15 @@
+# Copyright 2022 Huawei Cloud Computing Technology Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+{"foo": "", None: ["bar"]}
diff --git a/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/dict_converter.py b/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/dict_converter.py
new file mode 100755
index 00000000..61cf2e8d
--- /dev/null
+++ b/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/dict_converter.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+# Copyright 2022 Huawei Cloud Computing Technology Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import ast
+import json
+import yaml
+
+if len(sys.argv) < 4:
+ print(f"usage: {sys.argv[0]} [py|json|yaml] [py|json|yaml] <file>")
+ sys.exit(1)
+
+with open(sys.argv[3]) as f:
+ data = {}
+ if sys.argv[1] == "py":
+ data = ast.literal_eval(f.read())
+ elif sys.argv[1] == "json":
+ data = json.load(f)
+ elif sys.argv[1] == "yaml":
+ data = yaml.load(f)
+
+ if (sys.argv[2] == "py"):
+ print(data)
+ elif sys.argv[2] == "json":
+ print(json.dumps(data, indent=2))
+ elif sys.argv[2] == "yaml":
+ print(yaml.dump(data, indent=2))
diff --git a/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/foo.py b/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/foo.py
new file mode 100644
index 00000000..3218bb76
--- /dev/null
+++ b/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/foo.py
@@ -0,0 +1,15 @@
+# Copyright 2022 Huawei Cloud Computing Technology Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+{"foo": "", 0: 4711}
diff --git a/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/repos.json b/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/repos.json
new file mode 100644
index 00000000..63d397d4
--- /dev/null
+++ b/test/end-to-end/target-cache/test-data-artifacts-sync/pydicts/repos.json
@@ -0,0 +1,5 @@
+{ "repositories":
+ { "main":
+ {"repository": {"type": "file", "path": ".", "pragma": {"to_git": true}}}
+ }
+}