path: root/CC/auto/runner
Diffstat (limited to 'CC/auto/runner')
-rwxr-xr-x  CC/auto/runner  162
1 file changed, 0 insertions(+), 162 deletions(-)
diff --git a/CC/auto/runner b/CC/auto/runner
deleted file mode 100755
index 2da44af..0000000
--- a/CC/auto/runner
+++ /dev/null
@@ -1,162 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 Huawei Cloud Computing Technology Co., Ltd.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import re
-from sys import argv
-from typing import Union
-
-
-def get_tokens(line: str, magic_string: str) -> tuple[Union[re.Match[str], None], bool]:
-    """Tokenize lines (strings) like the following:
-        #cmakedefine FOO bar
-        #cmakedefine FOO @FOO@
-        #cmakedefine FOO ${FOO}
-        #cmakedefine01 FOO
-
-    where "cmakedefine" is the magic_string. Let us call "FOO" the token_key,
-    and the corresponding value (i.e., bar, @FOO@, ${FOO}, the empty string)
-    the token_value. The function handles any combination of spaces around the
-    magic_string, the token_key and the token_value.
-    """
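-    # capture groups: (1) text between "#" and the magic string, (2) the magic
-    # string itself, possibly suffixed with "01", (3) and (5) runs of spaces,
-    # (4) the token_key, (6) the token_value (possibly empty)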
-    x = re.search(
-        r"#(.*)(" + magic_string + r"[01]*" + r")([\s]*)([a-zA-Z0-9_]+)([\s]*)(.*)",
-        line,
-    )
-    if x:
-        return x, x.groups()[1] == f"{magic_string}01"
-    return None, False
-
-
-def handle01(line: str, tokens: re.Match[str], defined: bool) -> str:
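-    # rewrite "#cmakedefine01 FOO" as "#define FOO 1" when FOO is defined,
-    # or "#define FOO 0" otherwise, preserving the original spacing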
-    groups = tokens.groups()
-    return re.sub(
-        tokens.group()[1:],
-        groups[0]  # spaces
-        + "define"
-        + groups[2]  # spaces
-        + groups[3]  # token_key
-        + " "
-        + str(1 if defined else 0),
-        line,
-    )
-
-
-def undefine(tokens: re.Match[str]) -> str:
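-    # comment the line out as "/* #undef FOO */", which is what CMake's
-    # configure_file() emits for an undefined variable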
-    groups = tokens.groups()
-    return "/* #" + groups[0] + "undef" + groups[2] + groups[3] + " */"
-
-
-def replace_value(tokens: re.Match[str], key: str, value: str) -> str:
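-    # re-emit the line as a plain "#define KEY VALUE", keeping the spacing
-    # captured from the original line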
-    groups = tokens.groups()
-    return f"#{groups[0]}define{groups[2]}{key}{groups[4]}{value}"
-
-
-def compute_value(token_value: str, at_only: bool, param: dict[str, str]):
-    # examples of possible token_values:
-    #   - foo (a simple string)
-    #   - @FOO@
-    #   - ${FOO}
-    #   - any combination of the above
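-    # @FOO@ occurrences are always substituted; ${FOO} occurrences only when
-    # at_only is false (analogous to configure_file()'s @ONLY option)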
-
-    def replace_pattern_in_string(
-        pattern: str, line: str, param: dict[str, str]
-    ) -> str:
-
-        def get_value_for_match(match: re.Match[str]) -> str:
-            key = match.group(1)
-            return param.get(key, "")
-
-        return re.sub(pattern, get_value_for_match, line)
-
-    token_value = replace_pattern_in_string(r"@([A-Za-z0-9_]+)@", token_value, param)
-    if at_only:
-        return token_value
-    return replace_pattern_in_string(r"\${([A-Za-z0-9_]+)}", token_value, param)
-
-
-if __name__ == "__main__":
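-    # expected invocation:
-    #   runner <input_file> <param_file> <magic_string> <at_only: true|false>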
-    input_file = argv[1]
-    param_file = argv[2]
-    magic_string = argv[3]
-    at_only = argv[4] == "true"
-
-    with open(param_file) as f:
-        param = json.loads(f.read())
-
-    # In many cases, CMake simply defines some variables (without any associated
-    # value). We handle this situation by mapping the boolean True to the empty
-    # string. Note that no False value should be found, because the right way to
-    # set a variable to False in the TARGETS file is to not mention that variable
-    # at all.
-    # If a value is deliberately set to null, we drop that key.
-    drop_keys: list[str] = []
-    for k, v in param.items():
-        if isinstance(v, bool):
-            param[k] = ""
-        if v is None:
-            drop_keys.append(k)
-    for k in drop_keys:
-        del param[k]
-
-    with open(input_file) as i:
-        with open("out", "w") as o:
-            for line in i.readlines():
-                # drop trailing '\n'
-                line = line[:-1]
-
-                tokens, is_01 = get_tokens(line, magic_string)
-                # no magic string
-                if not tokens:
-                    # it can be a simple comment, or a line without the magic
-                    # string but with the @KEY@ or ${KEY} pattern
-                    line = compute_value(line, at_only, param)
-                    print(line, file=o)
-                    continue
-
-                # line contains the magic_string
-                groups = tokens.groups()
-
-                token_key: str = groups[3]
-
-                if is_01:
-                    line = handle01(line, tokens, token_key in param)
-                    print(line, file=o)
-                    continue
-
-                if token_key not in param:
-                    line = undefine(tokens)
-                    print(line, file=o)
-                    continue
-
-                # we are in one of these situations:
-                #   cmakedefine FOO
-                #   cmakedefine FOO "foo"
-                #   cmakedefine FOO @FOO@${FOO}foo
-                #
-                # i.e., the token_value can be any combination of keys (defined
-                # as @key@ or ${key}) and strings; therefore, we need to further
-                # tokenize the token_value and substitute each key
-
-                token_value: str = groups[5]
-
-                value = compute_value(token_value, at_only, param)
-
-                line = replace_value(tokens, token_key, value)
-
-                print(line, file=o)