
[xDS Proto] Modernize buildgen scripts (#25512)

* [xDS Proto] Modernize buildgen scripts

* This PR only covers some buildgen scripts that will be used by the xDS protos
* This PR also improves the debuggability of some scripts
* Merge with master

* Adopt the reviewer's advice
Lidi Zheng, 4 years ago
parent commit: 39cc797025

+ 27 - 45
tools/buildgen/mako_renderer.py → tools/buildgen/_mako_renderer.py

@@ -18,53 +18,61 @@ Just a wrapper around the mako rendering library.
 """
 
 import getopt
+import glob
 import importlib.util
 import os
 import pickle
 import shutil
 import sys
+from typing import List
 
 import yaml
+from mako import exceptions
 from mako.lookup import TemplateLookup
 from mako.runtime import Context
 from mako.template import Template
 
-import bunch
+PROJECT_ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..",
+                            "..")
+# TODO(lidiz) find a better way for plugins to reference each other
+sys.path.append(os.path.join(PROJECT_ROOT, 'tools', 'buildgen', 'plugins'))
 
 
-# Imports a plugin
-def import_plugin(path):
-    module_name = os.path.basename(path).replace('.py', '')
-    spec = importlib.util.spec_from_file_location(module_name, path)
-    module = importlib.util.module_from_spec(spec)
-    sys.modules[module_name] = module
-    spec.loader.exec_module(module)
-    return module
-
-
-def out(msg):
+def out(msg: str) -> None:
     print(msg, file=sys.stderr)
 
 
-def showhelp():
+def showhelp() -> None:
    out('mako-renderer.py [-o out] [-m cache] [-P preprocessed_input] [-d dict] [-d dict...]'
        ' [-t template] [-w preprocessed_output]')
 
 
-def main(argv):
+def render_template(template: Template, context: Context) -> None:
+    """Render the mako template with given context.
+
+    Prints an error template to indicate where and what in the template caused
+    the render failure.
+    """
+    try:
+        template.render_context(context)
+    except:
+        out(exceptions.text_error_template().render())
+        raise
+
+
+def main(argv: List[str]) -> None:
     got_input = False
     module_directory = None
     preprocessed_output = None
     dictionary = {}
     json_dict = {}
     got_output = False
-    plugins = []
     output_name = None
     got_preprocessed_input = False
     output_merged = None
 
     try:
-        opts, args = getopt.getopt(argv, 'hM:m:d:o:p:t:P:w:')
+        opts, args = getopt.getopt(argv, 'hM:m:o:t:P:')
     except getopt.GetoptError:
         out('Unknown option')
         showhelp()
@@ -97,36 +105,9 @@ def main(argv):
         elif opt == '-P':
             assert not got_preprocessed_input
             assert json_dict == {}
-            sys.path.insert(
-                0,
-                os.path.abspath(
-                    os.path.join(os.path.dirname(sys.argv[0]), 'plugins')))
             with open(arg, 'rb') as dict_file:
                 dictionary = pickle.load(dict_file)
             got_preprocessed_input = True
-        elif opt == '-d':
-            assert not got_preprocessed_input
-            with open(arg, 'r') as dict_file:
-                bunch.merge_json(
-                    json_dict,
-                    yaml.load(dict_file.read(), Loader=yaml.FullLoader))
-        elif opt == '-p':
-            plugins.append(import_plugin(arg))
-        elif opt == '-w':
-            preprocessed_output = arg
-
-    if not got_preprocessed_input:
-        for plugin in plugins:
-            plugin.mako_plugin(json_dict)
-        if output_merged:
-            with open(output_merged, 'w') as yaml_file:
-                yaml_file.write(yaml.dump(json_dict))
-        for k, v in json_dict.items():
-            dictionary[k] = bunch.to_bunch(v)
-
-    if preprocessed_output:
-        with open(preprocessed_output, 'wb') as dict_file:
-            pickle.dump(dictionary, dict_file)
 
     cleared_dir = False
     for arg in args:
@@ -141,7 +122,8 @@
                                     module_directory=module_directory,
                                     lookup=TemplateLookup(directories=['.']))
                 with open(output_name, 'w') as output_file:
-                    template.render_context(Context(output_file, **dictionary))
+                    render_template(template, Context(output_file,
+                                                      **dictionary))
             else:
                 # we have optional control data: this template represents
                 # a directory
@@ -179,7 +161,7 @@
                         module_directory=module_directory,
                         lookup=TemplateLookup(directories=['.']))
                     with open(item_output_name, 'w') as output_file:
-                        template.render_context(Context(output_file, **args))
+                        render_template(template, Context(output_file, **args))
 
     if not got_input and not preprocessed_output:
         out('Got nothing to do')
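
The debuggability improvement mentioned in the commit message boils down to the new render_template wrapper above: instead of a bare traceback, a failed render now prints mako's annotated error report before re-raising. A minimal, self-contained sketch of that pattern follows (illustrative only, not part of the commit; the broken template is invented for the example):

import sys

from mako import exceptions
from mako.runtime import Context
from mako.template import Template


def render_template(template: Template, context: Context) -> None:
    # Same pattern as _mako_renderer.render_template: render, and on failure
    # print mako's annotated error report before re-raising.
    try:
        template.render_context(context)
    except:
        print(exceptions.text_error_template().render(), file=sys.stderr)
        raise


# A deliberately broken template: 'missing_variable' is never supplied, so the
# render fails and the annotated report points at the offending expression.
broken = Template("value is ${missing_variable + 1}")
try:
    render_template(broken, Context(sys.stdout))
except Exception:
    print("render failed; see the annotated report above", file=sys.stderr)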

+ 27 - 9
tools/buildgen/bunch.py → tools/buildgen/_utils.py

@@ -1,4 +1,5 @@
-# Copyright 2015 gRPC authors.
+#!/usr/bin/env python3
+# Copyright 2020 The gRPC Authors
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -11,18 +12,35 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-"""Allows dot-accessible dictionaries."""
+"""Utility functions for build file generation scripts."""
+
+import os
+import sys
+import types
+import importlib.util
+from typing import Any, Union, Mapping, List
+
+
+def import_python_module(path: str) -> types.ModuleType:
+    """Imports the Python file at the given path, returns a module object."""
+    module_name = os.path.basename(path).replace('.py', '')
+    spec = importlib.util.spec_from_file_location(module_name, path)
+    module = importlib.util.module_from_spec(spec)
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)
+    return module
 
 
 class Bunch(dict):
+    """Allows dot-accessible dictionaries."""
 
-    def __init__(self, d):
+    def __init__(self, d: Mapping):
         dict.__init__(self, d)
         self.__dict__.update(d)
 
 
-# Converts any kind of variable to a Bunch
-def to_bunch(var):
+def to_bunch(var: Any) -> Any:
+    """Converts any kind of variable to a Bunch."""
     if isinstance(var, list):
         return [to_bunch(i) for i in var]
     if isinstance(var, dict):
@@ -36,12 +54,12 @@ def to_bunch(var):
         return var
 
 
-# Merges JSON 'add' into JSON 'dst'
-def merge_json(dst, add):
+def merge_json(dst: Union[Mapping, List], add: Union[Mapping, List]) -> None:
+    """Merges JSON objects recursively."""
     if isinstance(dst, dict) and isinstance(add, dict):
         for k, v in add.items():
             if k in dst:
-                if k == '#':
+                if k.startswith('#'):
                     continue
                 merge_json(dst[k], v)
             else:
@@ -49,6 +67,6 @@ def merge_json(dst, add):
     elif isinstance(dst, list) and isinstance(add, list):
         dst.extend(add)
     else:
-        raise Exception(
+        raise TypeError(
             'Tried to merge incompatible objects %s %s\n\n%r\n\n%r' %
             (type(dst).__name__, type(add).__name__, dst, add))
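
As a quick illustration of the helpers that now live in tools/buildgen/_utils.py (a sketch, not part of the commit; it assumes the file is importable as _utils from the current working directory):

import _utils  # assumes tools/buildgen is on sys.path

base = {'libs': [{'name': 'grpc'}], '#': 'comment-style keys are skipped'}
extra = {'libs': [{'name': 'gpr'}], '#': 'ignored', 'settings': {'version': '1.x'}}

# Dicts merge recursively, lists are extended, and '#'-prefixed keys that
# already exist in the destination are left untouched.
_utils.merge_json(base, extra)

# to_bunch() wraps the merged dict so templates can use dot access.
spec = _utils.to_bunch(base)
print(spec.libs[0].name)        # grpc
print(spec.libs[1].name)        # gpr
print(spec.settings.version)    # 1.x

# Incompatible shapes now raise TypeError instead of a generic Exception.
try:
    _utils.merge_json({'a': 1}, ['a'])
except TypeError as exc:
    print('merge rejected:', exc)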

+ 1 - 1
tools/buildgen/generate_build_additions.sh

@@ -31,7 +31,7 @@ gen_build_yaml_dirs="  \
 gen_build_files=""
 for gen_build_yaml in $gen_build_yaml_dirs
 do
-  output_file=`mktemp /tmp/genXXXXXX`
+  output_file=$(mktemp /tmp/gen_$(echo $gen_build_yaml | tr '/' '_').yaml.XXXXX)
   python3 $gen_build_yaml/gen_build_yaml.py > $output_file
   gen_build_files="$gen_build_files $output_file"
 done

+ 115 - 83
tools/buildgen/generate_projects.py

@@ -14,99 +14,131 @@
 
 import argparse
 import glob
+import yaml
+import pickle
 import os
 import shutil
 import sys
 import tempfile
 import multiprocessing
-sys.path.append(
-    os.path.join(os.path.dirname(sys.argv[0]), '..', 'run_tests',
-                 'python_utils'))
+from typing import Union, Dict, List
+
+import _utils
+
+PROJECT_ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..",
+                            "..")
+os.chdir(PROJECT_ROOT)
+# TODO(lidiz) find a better way for plugins to reference each other
+sys.path.append(os.path.join(PROJECT_ROOT, 'tools', 'buildgen', 'plugins'))
+
+# from tools.run_tests.python_utils import jobset
+jobset = _utils.import_python_module(
+    os.path.join(PROJECT_ROOT, 'tools', 'run_tests', 'python_utils',
+                 'jobset.py'))
+
+PREPROCESSED_BUILD = '.preprocessed_build'
+test = {} if os.environ.get('TEST', 'false') == 'true' else None
 
 assert sys.argv[1:], 'run generate_projects.sh instead of this directly'
+parser = argparse.ArgumentParser()
+parser.add_argument('build_files',
+                    nargs='+',
+                    default=[],
+                    help="build files describing build specs")
+parser.add_argument('--templates',
+                    nargs='+',
+                    default=[],
+                    help="mako template files to render")
+parser.add_argument('--output_merged',
+                    '-m',
+                    default='',
+                    type=str,
+                    help="merge intermediate results to a file")
+parser.add_argument('--jobs',
+                    '-j',
+                    default=multiprocessing.cpu_count(),
+                    type=int,
+                    help="maximum parallel jobs")
+parser.add_argument('--base',
+                    default='.',
+                    type=str,
+                    help="base path for generated files")
+args = parser.parse_args()
 
-import jobset
 
-os.chdir(os.path.join(os.path.dirname(sys.argv[0]), '..', '..'))
+def preprocess_build_files() -> _utils.Bunch:
+    """Merges build yaml into a one dictionary then pass it to plugins."""
+    build_spec = dict()
+    for build_file in args.build_files:
+        with open(build_file, 'r') as f:
+            _utils.merge_json(build_spec,
+                              yaml.load(f.read(), Loader=yaml.FullLoader))
+    # Executes plugins. Plugins update the build spec in-place.
+    for py_file in sorted(glob.glob('tools/buildgen/plugins/*.py')):
+        plugin = _utils.import_python_module(py_file)
+        plugin.mako_plugin(build_spec)
+    if args.output_merged:
+        with open(args.output_merged, 'w') as f:
+            f.write(yaml.dump(build_spec))
+    # Makes build_spec sort of immutable and dot-accessible
+    return _utils.to_bunch(build_spec)
 
-argp = argparse.ArgumentParser()
-argp.add_argument('build_files', nargs='+', default=[])
-argp.add_argument('--templates', nargs='+', default=[])
-argp.add_argument('--output_merged', default=None, type=str)
-argp.add_argument('--jobs', '-j', default=multiprocessing.cpu_count(), type=int)
-argp.add_argument('--base', default='.', type=str)
-args = argp.parse_args()
 
-json = args.build_files
+def generate_template_render_jobs(templates: List[str]) -> List[jobset.JobSpec]:
+    """Generate JobSpecs for each one of the template rendering work."""
+    jobs = []
+    base_cmd = [sys.executable, 'tools/buildgen/_mako_renderer.py']
+    for template in sorted(templates, reverse=True):
+        root, f = os.path.split(template)
+        if os.path.splitext(f)[1] == '.template':
+            out_dir = args.base + root[len('templates'):]
+            out = os.path.join(out_dir, os.path.splitext(f)[0])
+            if not os.path.exists(out_dir):
+                os.makedirs(out_dir)
+            cmd = base_cmd[:]
+            cmd.append('-P')
+            cmd.append(PREPROCESSED_BUILD)
+            cmd.append('-o')
+            if test is None:
+                cmd.append(out)
+            else:
+                tf = tempfile.mkstemp()
+                test[out] = tf[1]
+                os.close(tf[0])
+                cmd.append(test[out])
+            cmd.append(args.base + '/' + root + '/' + f)
+            jobs.append(jobset.JobSpec(cmd, shortname=out,
+                                       timeout_seconds=None))
+    return jobs
+
+
+def main() -> None:
+    templates = args.templates
+    if not templates:
+        for root, _, files in os.walk('templates'):
+            for f in files:
+                templates.append(os.path.join(root, f))
+
+    build_spec = preprocess_build_files()
+    with open(PREPROCESSED_BUILD, 'wb') as f:
+        pickle.dump(build_spec, f)
+
+    err_cnt, _ = jobset.run(generate_template_render_jobs(templates),
+                            maxjobs=args.jobs)
+    if err_cnt != 0:
+        print(f'ERROR: {err_cnt} error(s) found while generating projects.',
+              file=sys.stderr)
+        sys.exit(1)
+
+    if test is not None:
+        for s, g in test.items():
+            if os.path.isfile(g):
+                assert 0 == os.system('diff %s %s' % (s, g)), s
+                os.unlink(g)
+            else:
+                assert 0 == os.system('diff -r %s %s' % (s, g)), s
+                shutil.rmtree(g, ignore_errors=True)
 
-test = {} if os.environ.get('TEST', 'false') == 'true' else None
 
-plugins = sorted(glob.glob('tools/buildgen/plugins/*.py'))
-
-templates = args.templates
-if not templates:
-    for root, dirs, files in os.walk('templates'):
-        for f in files:
-            templates.append(os.path.join(root, f))
-
-pre_jobs = []
-base_cmd = [sys.executable, 'tools/buildgen/mako_renderer.py']
-cmd = base_cmd[:]
-for plugin in plugins:
-    cmd.append('-p')
-    cmd.append(plugin)
-for js in json:
-    cmd.append('-d')
-    cmd.append(js)
-cmd.append('-w')
-preprocessed_build = '.preprocessed_build'
-cmd.append(preprocessed_build)
-if args.output_merged is not None:
-    cmd.append('-M')
-    cmd.append(args.output_merged)
-pre_jobs.append(
-    jobset.JobSpec(cmd, shortname='preprocess', timeout_seconds=None))
-
-jobs = []
-for template in reversed(sorted(templates)):
-    root, f = os.path.split(template)
-    if os.path.splitext(f)[1] == '.template':
-        out_dir = args.base + root[len('templates'):]
-        out = out_dir + '/' + os.path.splitext(f)[0]
-        if not os.path.exists(out_dir):
-            os.makedirs(out_dir)
-        cmd = base_cmd[:]
-        cmd.append('-P')
-        cmd.append(preprocessed_build)
-        cmd.append('-o')
-        if test is None:
-            cmd.append(out)
-        else:
-            tf = tempfile.mkstemp()
-            test[out] = tf[1]
-            os.close(tf[0])
-            cmd.append(test[out])
-        cmd.append(args.base + '/' + root + '/' + f)
-        jobs.append(jobset.JobSpec(cmd, shortname=out, timeout_seconds=None))
-
-err_cnt, _ = jobset.run(pre_jobs, maxjobs=args.jobs)
-if err_cnt != 0:
-    print('ERROR: {count} error(s) encountered during preprocessing.'.format(
-        count=err_cnt),
-          file=sys.stderr)
-    sys.exit(1)
-err_cnt, _ = jobset.run(jobs, maxjobs=args.jobs)
-if err_cnt != 0:
-    print('ERROR: {count} error(s) found while generating projects.'.format(
-        count=err_cnt),
-          file=sys.stderr)
-    sys.exit(1)
-
-if test is not None:
-    for s, g in test.items():
-        if os.path.isfile(g):
-            assert 0 == os.system('diff %s %s' % (s, g)), s
-            os.unlink(g)
-        else:
-            assert 0 == os.system('diff -r %s %s' % (s, g)), s
-            shutil.rmtree(g, ignore_errors=True)
+if __name__ == "__main__":
+    main()
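
The rewritten generate_projects.py splits the work into two stages: the build yaml files are merged and run through the plugins in-process, the resulting spec is pickled to .preprocessed_build, and each template is then rendered by a separate _mako_renderer.py invocation that reloads the spec via -P. A rough sketch of that handoff (illustrative only; the template and output paths are examples, and the real script schedules renders through jobset rather than subprocess):

import pickle
import subprocess
import sys

PREPROCESSED_BUILD = '.preprocessed_build'

# Stage 1: pretend this dict is the merged, plugin-processed build spec.
build_spec = {'settings': {'version': '1.x'}}
with open(PREPROCESSED_BUILD, 'wb') as f:
    pickle.dump(build_spec, f)

# Stage 2: one renderer invocation per template, mirroring the command line
# that generate_template_render_jobs() builds for jobset.
cmd = [
    sys.executable, 'tools/buildgen/_mako_renderer.py',
    '-P', PREPROCESSED_BUILD,
    '-o', 'Makefile',                      # example output path
    './templates/Makefile.template',       # example template path
]
subprocess.run(cmd, check=True)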

+ 1 - 2
tools/buildgen/generate_projects.sh

@@ -29,7 +29,6 @@ rm -f build_autogenerated.yaml
 python3 tools/buildgen/extract_metadata_from_bazel_xml.py
 
 cd `dirname $0`/../..
-mako_renderer=tools/buildgen/mako_renderer.py
 
 tools/buildgen/build_cleaner.py build_handwritten.yaml
 
@@ -41,6 +40,6 @@ TEST=true tools/buildgen/build_cleaner.py build_autogenerated.yaml
 # Instead of generating from a single build.yaml, we've split it into
 # - build_handwritten.yaml: manually written metadata
 # - build_autogenerated.yaml: generated from bazel BUILD file
-python3 tools/buildgen/generate_projects.py build_handwritten.yaml build_autogenerated.yaml $gen_build_files $*
+python3 tools/buildgen/generate_projects.py build_handwritten.yaml build_autogenerated.yaml $gen_build_files "$@"
 
 rm $gen_build_files

+ 8 - 5
tools/run_tests/python_utils/jobset.py

@@ -130,15 +130,15 @@ def message(tag, msg, explanatory_text=None, do_newline=False):
         try:
             if platform_string() == 'windows' or not sys.stdout.isatty():
                 if explanatory_text:
-                    logging.info(explanatory_text)
+                    logging.info(explanatory_text.decode('utf8'))
                 logging.info('%s: %s', tag, msg)
             else:
                 sys.stdout.write(
                     '%s%s%s\x1b[%d;%dm%s\x1b[0m: %s%s' %
                     (_BEGINNING_OF_LINE, _CLEAR_LINE, '\n%s' %
-                     explanatory_text if explanatory_text is not None else '',
-                     _COLORS[_TAG_COLOR[tag]][1], _COLORS[_TAG_COLOR[tag]][0],
-                     tag, msg, '\n'
+                     explanatory_text.decode('utf8') if explanatory_text
+                     is not None else '', _COLORS[_TAG_COLOR[tag]][1],
+                     _COLORS[_TAG_COLOR[tag]][0], tag, msg, '\n'
                      if do_newline or explanatory_text is not None else ''))
             sys.stdout.flush()
             return
@@ -277,7 +277,10 @@ class Job(object):
                 os.makedirs(logfile_dir)
             self._logfile = open(self._spec.logfilename, 'w+')
         else:
-            self._logfile = tempfile.TemporaryFile()
+            # macOS: a series of quick os.unlink invocation might cause OS
+            # error during the creation of temporary file. By using
+            # NamedTemporaryFile, we defer the removal of file and directory.
+            self._logfile = tempfile.NamedTemporaryFile()
         env = dict(os.environ)
         env.update(self._spec.environ)
         env.update(self._add_env)
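
The jobset.py change swaps tempfile.TemporaryFile for tempfile.NamedTemporaryFile to work around macOS errors when temporary files are unlinked in quick succession. A small sketch of how the named variant behaves as a subprocess log sink (illustrative only, not from the commit):

import subprocess
import tempfile

# NamedTemporaryFile (new) keeps a named file on disk until close(); the
# previous TemporaryFile could trip over macOS unlink timing under load.
logfile = tempfile.NamedTemporaryFile()
subprocess.run(['echo', 'hello buildgen'],
               stdout=logfile,
               stderr=subprocess.STDOUT,
               check=True)

logfile.seek(0)
print(logfile.read().decode())  # captured output of the child process
logfile.close()                 # the temporary file is removed here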