extract_metadata_from_bazel_xml.py
1 #!/usr/bin/env python3
2 # Copyright 2020 The gRPC Authors
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15 
16 # Script to extract build metadata from bazel BUILD.
17 # To avoid having two sources of truth for the build metadata (build
18 # targets, source files, header files etc.), this script analyzes the contents
19 # of bazel BUILD files and generates a YAML file (currently called
20 # build_autogenerated.yaml). The format and semantics of the generated YAML file
21 # are chosen to match the format of a "build.yaml" file, which used
22 # to be the source of truth for the gRPC build before bazel became
23 # the primary build system.
24 # A good basic overview of the "build.yaml" format is available here:
25 # https://github.com/grpc/grpc/blob/master/templates/README.md. Note that
26 # while useful as an overview, the doc does not act as a formal spec
27 # (in fact, no formal spec exists) and it can be incomplete,
28 # inaccurate or slightly out of date.
29 # TODO(jtattermusch): In the future we want to get rid of the legacy build.yaml
30 # format entirely or simplify it to a point where it becomes self-explanatory
31 # and doesn't need any detailed documentation.
32 
33 import collections
34 import os
35 import subprocess
36 from typing import Any, Dict, Iterable, List, Optional
37 import xml.etree.ElementTree as ET
38 
39 import build_cleaner
40 
41 BuildMetadata = Dict[str, Any]
42 BuildDict = Dict[str, BuildMetadata]
43 BuildYaml = Dict[str, Any]
44 
48 
49 
50 class ExternalProtoLibrary:
51  """ExternalProtoLibrary holds metadata about an external proto library.
52 
53  Fields:
54  - destination(str): The relative path where this proto library should be placed.
55  Preferably, it should match the submodule path.
56  - proto_prefix(str): The prefix to remove in order to ensure the proto import
57  is correct. For more info, see description of
58  https://github.com/grpc/grpc/pull/25272.
59  - urls(List[str]): This and the following two fields should be filled in from
60  the build metadata obtained from Bazel.
61  - hash(str): The hash of the downloaded archive
62  - strip_prefix(str): The path to be stripped from the extracted directory, see
63  http_archive in Bazel.
64  """
65 
66  def __init__(self,
67  destination,
68  proto_prefix,
69  urls=None,
70  hash="",
71  strip_prefix=""):
72  self.destination = destination
73  self.proto_prefix = proto_prefix
74  if urls is None:
75  self.urls = []
76  else:
77  self.urls = urls
78  self.hash = hash
79  self.strip_prefix = strip_prefix
80 
81 
82 EXTERNAL_PROTO_LIBRARIES = {
83  'envoy_api':
84  ExternalProtoLibrary(destination='third_party/envoy-api',
85  proto_prefix='third_party/envoy-api/'),
86  'com_google_googleapis':
87  ExternalProtoLibrary(destination='third_party/googleapis',
88  proto_prefix='third_party/googleapis/'),
89  'com_github_cncf_udpa':
90  ExternalProtoLibrary(destination='third_party/xds',
91  proto_prefix='third_party/xds/'),
92  'opencensus_proto':
93  ExternalProtoLibrary(destination='third_party/opencensus-proto/src',
94  proto_prefix='third_party/opencensus-proto/src/'),
95 }
96 
97 
98 def _maybe_get_internal_path(name: str) -> Optional[str]:
99  for key in EXTERNAL_PROTO_LIBRARIES:
100  if name.startswith('@' + key):
101  return key
102  return None
103 
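# Illustrative examples of _maybe_get_internal_path (hypothetical inputs shown
# only for clarity; behavior follows directly from the function above):
#   _maybe_get_internal_path('@envoy_api//envoy/config:foo') -> 'envoy_api'
#   _maybe_get_internal_path('//:grpc') -> None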
104 
105 def _bazel_query_xml_tree(query: str) -> ET.Element:
106  """Get xml output of bazel query invocation, parsed as XML tree"""
107  output = subprocess.check_output(
108  ['tools/bazel', 'query', '--noimplicit_deps', '--output', 'xml', query])
109  return ET.fromstring(output)
110 
111 
112 def _rule_dict_from_xml_node(rule_xml_node):
113  """Converts XML node representing a rule (obtained from "bazel query --output xml") to a dictionary that contains all the metadata we will need."""
114  result = {
115  'class': rule_xml_node.attrib.get('class'),
116  'name': rule_xml_node.attrib.get('name'),
117  'srcs': [],
118  'hdrs': [],
119  'deps': [],
120  'data': [],
121  'tags': [],
122  'args': [],
123  'generator_function': None,
124  'size': None,
125  'flaky': False,
126  }
127  for child in rule_xml_node:
128  # all the metadata we want is stored under "list" tags
129  if child.tag == 'list':
130  list_name = child.attrib['name']
131  if list_name in ['srcs', 'hdrs', 'deps', 'data', 'tags', 'args']:
132  result[list_name] += [item.attrib['value'] for item in child]
133  if child.tag == 'string':
134  string_name = child.attrib['name']
135  if string_name in ['generator_function', 'size']:
136  result[string_name] = child.attrib['value']
137  if child.tag == 'boolean':
138  bool_name = child.attrib['name']
139  if bool_name in ['flaky']:
140  result[bool_name] = child.attrib['value'] == 'true'
141  return result
142 
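# For reference, a simplified sketch of the XML shape that
# _rule_dict_from_xml_node consumes (an abbreviated approximation; the real
# "bazel query --output xml" output carries many more attributes and child
# elements):
#
#   <rule class="cc_library" name="//:grpc">
#     <list name="srcs"> <label value="//:src/core/lib/surface/init.cc"/> ... </list>
#     <list name="hdrs"> <label value="//:include/grpc/grpc.h"/> ... </list>
#     <list name="deps"> <label value="//:gpr"/> ... </list>
#     <string name="generator_function" value="..."/>
#     <boolean name="flaky" value="false"/>
#   </rule>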
143 
144 def _extract_rules_from_bazel_xml(xml_tree):
145  """Extract bazel rules from an XML tree node obtained from the "bazel query --output xml" command."""
146  result = {}
147  for child in xml_tree:
148  if child.tag == 'rule':
149  rule_dict = _rule_dict_from_xml_node(child)
150  rule_clazz = rule_dict['class']
151  rule_name = rule_dict['name']
152  if rule_clazz in [
153  'cc_library',
154  'cc_binary',
155  'cc_test',
156  'cc_proto_library',
157  'cc_proto_gen_validate',
158  'proto_library',
159  'upb_proto_library',
160  'upb_proto_reflection_library',
161  ]:
162  if rule_name in result:
163  raise Exception('Rule %s already present' % rule_name)
164  result[rule_name] = rule_dict
165  return result
166 
167 
168 def _get_bazel_label(target_name: str) -> str:
169  if target_name.startswith('@'):
170  return target_name
171  if ':' in target_name:
172  return '//%s' % target_name
173  else:
174  return '//:%s' % target_name
175 
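# Illustrative examples of _get_bazel_label (hypothetical inputs shown only
# for clarity; behavior follows directly from the function above):
#   _get_bazel_label('gpr') -> '//:gpr'
#   _get_bazel_label('src/compiler:grpc_plugin_support') -> '//src/compiler:grpc_plugin_support'
#   _get_bazel_label('@com_google_absl//absl/strings') -> '@com_google_absl//absl/strings'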
176 
177 def _extract_source_file_path(label: str) -> str:
178  """Gets relative path to source file from bazel deps listing"""
179  if label.startswith('//'):
180  label = label[len('//'):]
181  # labels in form //:src/core/lib/surface/call_test_only.h
182  if label.startswith(':'):
183  label = label[len(':'):]
184  # labels in form //test/core/util:port.cc
185  label = label.replace(':', '/')
186  return label
187 
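# Illustrative examples of _extract_source_file_path (hypothetical inputs
# mirroring the label forms mentioned in the comments above):
#   _extract_source_file_path('//:src/core/lib/surface/call_test_only.h')
#       -> 'src/core/lib/surface/call_test_only.h'
#   _extract_source_file_path('//test/core/util:port.cc')
#       -> 'test/core/util/port.cc'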
188 
189 def _extract_public_headers(bazel_rule: BuildMetadata) -> List[str]:
190  """Gets list of public headers from a bazel rule"""
191  result = []
192  for dep in bazel_rule['hdrs']:
193  if dep.startswith('//:include/') and dep.endswith('.h'):
194  result.append(_extract_source_file_path(dep))
195  return list(sorted(result))
196 
197 
198 def _extract_nonpublic_headers(bazel_rule: BuildMetadata) -> List[str]:
199  """Gets list of non-public headers from a bazel rule"""
200  result = []
201  for dep in bazel_rule['hdrs']:
202  if dep.startswith('//') and not dep.startswith(
203  '//:include/') and dep.endswith('.h'):
204  result.append(_extract_source_file_path(dep))
205  return list(sorted(result))
206 
207 
208 def _extract_sources(bazel_rule: BuildMetadata) -> List[str]:
209  """Gets list of source files from a bazel rule"""
210  result = []
211  for src in bazel_rule['srcs']:
212  if src.endswith('.cc') or src.endswith('.c') or src.endswith('.proto'):
213  if src.startswith('//'):
214  # This source file is local to gRPC
215  result.append(_extract_source_file_path(src))
216  else:
217  # This source file is external, and we need to translate the
218  # @REPO_NAME to a valid path prefix. At this stage, we need
219  # to check repo name, since the label/path mapping is not
220  # available in BUILD files.
221  external_proto_library_name = _maybe_get_internal_path(src)
222  if external_proto_library_name is not None:
223  result.append(
224  src.replace(
225  '@%s//' % external_proto_library_name,
226  EXTERNAL_PROTO_LIBRARIES[
227  external_proto_library_name].proto_prefix).
228  replace(':', '/'))
229  return list(sorted(result))
230 
231 
232 def _extract_deps(bazel_rule: BuildMetadata,
233  bazel_rules: BuildDict) -> List[str]:
234  """Gets list of deps from a bazel rule"""
235  deps = set(bazel_rule['deps'])
236  for src in bazel_rule['srcs']:
237  if not src.endswith('.cc') and not src.endswith(
238  '.c') and not src.endswith('.proto'):
239  if src in bazel_rules:
240  # This label doesn't point to a source file, but another Bazel
241  # target. This is required for :pkg_cc_proto_validate targets,
242  # and it's generally allowed by Bazel.
243  deps.add(src)
244  return list(sorted(list(deps)))
245 
246 
247 def _create_target_from_bazel_rule(target_name: str,
248  bazel_rules: BuildDict) -> BuildMetadata:
249  """Create build.yaml-like target definition from bazel metadata"""
250  bazel_rule = bazel_rules[_get_bazel_label(target_name)]
251 
252  # Create a template for our target from the bazel rule. Initially we only
253  # populate some "private" fields with the original info we got from bazel
254  # and only later we will populate the public fields (once we do some extra
255  # postprocessing).
256  result = {
257  'name': target_name,
258  '_PUBLIC_HEADERS_BAZEL': _extract_public_headers(bazel_rule),
259  '_HEADERS_BAZEL': _extract_nonpublic_headers(bazel_rule),
260  '_SRC_BAZEL': _extract_sources(bazel_rule),
261  '_DEPS_BAZEL': _extract_deps(bazel_rule, bazel_rules),
262  'public_headers': bazel_rule['_COLLAPSED_PUBLIC_HEADERS'],
263  'headers': bazel_rule['_COLLAPSED_HEADERS'],
264  'src': bazel_rule['_COLLAPSED_SRCS'],
265  'deps': bazel_rule['_COLLAPSED_DEPS'],
266  }
267  return result
268 
269 
270 def _external_dep_name_from_bazel_dependency(bazel_dep: str) -> Optional[str]:
271  """Returns the name of the dependency if an external bazel dependency is provided, otherwise None"""
272  if bazel_dep.startswith('@com_google_absl//'):
273  # special case for adding a dependency on one of the absl libraries (there is not just one absl library)
274  prefixlen = len('@com_google_absl//')
275  return bazel_dep[prefixlen:]
276  elif bazel_dep == '//external:upb_lib':
277  return 'upb'
278  elif bazel_dep == '//external:benchmark':
279  return 'benchmark'
280  elif bazel_dep == '//external:libssl':
281  return 'libssl'
282  else:
283  # all the other external deps such as protobuf, cares, zlib
284  # don't need to be listed explicitly, they are handled automatically
285  # by the build system (make, cmake)
286  return None
287 
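# Illustrative examples of _external_dep_name_from_bazel_dependency
# (hypothetical calls; results follow directly from the branches above):
#   _external_dep_name_from_bazel_dependency('@com_google_absl//absl/strings') -> 'absl/strings'
#   _external_dep_name_from_bazel_dependency('//external:libssl') -> 'libssl'
#   _external_dep_name_from_bazel_dependency('//external:zlib') -> None (handled automatically)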
288 
289 def _compute_transitive_metadata(
290  rule_name: str, bazel_rules: Any,
291  bazel_label_to_dep_name: Dict[str, str]) -> None:
292  """Computes the final build metadata for Bazel target with rule_name.
293 
294  The dependencies that will appear on the deps list are:
295 
296  * Public build targets including binaries and tests;
297  * External targets, like absl, re2.
298 
299  All other intermediate dependencies will be merged, which means their
300  source files, headers, etc. will be collected into one build target. This
301  step of processing will greatly reduce the complexity of the generated
302  build specifications for other build systems, like CMake, Make, setuptools.
303 
304  The final build metadata are:
305  * _TRANSITIVE_DEPS: all the transitive dependencies including intermediate
306  targets;
307  * _COLLAPSED_DEPS: dependencies that fit the requirement above, with
308  duplicates removed, producing the shortest possible
309  dependency list in alphabetical order;
310  * _COLLAPSED_SRCS: the merged source files;
311  * _COLLAPSED_PUBLIC_HEADERS: the merged public headers;
312  * _COLLAPSED_HEADERS: the merged non-public headers;
313  * _EXCLUDE_DEPS: intermediate targets to exclude when performing collapsing
314  of sources and dependencies.
315 
316  For the collapsed_deps, the algorithm improves cases like:
317 
318  The result in the past:
319  end2end_tests -> [grpc_test_util, grpc, gpr, address_sorting, upb]
320  grpc_test_util -> [grpc, gpr, address_sorting, upb, ...]
321  grpc -> [gpr, address_sorting, upb, ...]
322 
323  The result of the algorithm:
324  end2end_tests -> [grpc_test_util]
325  grpc_test_util -> [grpc]
326  grpc -> [gpr, address_sorting, upb, ...]
327  """
328  bazel_rule = bazel_rules[rule_name]
329  direct_deps = _extract_deps(bazel_rule, bazel_rules)
330  transitive_deps = set()
331  collapsed_deps = set()
332  exclude_deps = set()
333  collapsed_srcs = set(_extract_sources(bazel_rule))
334  collapsed_public_headers = set(_extract_public_headers(bazel_rule))
335  collapsed_headers = set(_extract_nonpublic_headers(bazel_rule))
336 
337  for dep in direct_deps:
338  external_dep_name_maybe = _external_dep_name_from_bazel_dependency(dep)
339 
340  if dep in bazel_rules:
341  # Descend recursively, but no need to do that for external deps
342  if external_dep_name_maybe is None:
343  if "_PROCESSING_DONE" not in bazel_rules[dep]:
344  # This item has not been processed before, compute it now
345  _compute_transitive_metadata(dep, bazel_rules,
346  bazel_label_to_dep_name)
347  transitive_deps.update(bazel_rules[dep].get(
348  '_TRANSITIVE_DEPS', []))
349  collapsed_deps.update(
350  collapsed_deps, bazel_rules[dep].get('_COLLAPSED_DEPS', []))
351  exclude_deps.update(bazel_rules[dep].get('_EXCLUDE_DEPS', []))
352 
353  # This dep is a public target, add it as a dependency
354  if dep in bazel_label_to_dep_name:
355  transitive_deps.update([bazel_label_to_dep_name[dep]])
356  collapsed_deps.update(collapsed_deps,
357  [bazel_label_to_dep_name[dep]])
358  # Add all the transitive deps of every public dep to the exclude
359  # list since we want to avoid building sources that are already
360  # built by our dependencies
361  exclude_deps.update(bazel_rules[dep]['_TRANSITIVE_DEPS'])
362  continue
363 
364  # This dep is an external target, add it as a dependency
365  if external_dep_name_maybe is not None:
366  transitive_deps.update([external_dep_name_maybe])
367  collapsed_deps.update(collapsed_deps, [external_dep_name_maybe])
368  continue
369 
370  # Direct dependencies are part of transitive dependencies
371  transitive_deps.update(direct_deps)
372 
373  # Calculate transitive public deps (needed for collapsing sources)
374  transitive_public_deps = set(
375  [x for x in transitive_deps if x in bazel_label_to_dep_name])
376 
377  # Remove intermediate targets that our public dependencies already depend
378  # on. This is the step that further shortens the deps list.
379  collapsed_deps = set([x for x in collapsed_deps if x not in exclude_deps])
380 
381  # Compute the final source files and headers for this build target whose
382  # name is `rule_name` (input argument of this function).
383  #
384  # Imagine a public target PX that has transitive deps [IA, IB, PY, IC, PZ]. PX,
385  # PY and PZ are public build targets. And IA, IB, IC are intermediate
386  # targets. In addition, PY depends on IC.
387  #
388  # Translate the condition into dependency graph:
389  # PX -> [IA, IB, PY, IC, PZ]
390  # PY -> [IC]
391  # Public targets: [PX, PY, PZ]
392  #
393  # The collapsed dependencies of PX: [PY, PZ].
394  # The excluded dependencies of PX: [PY, IC, PZ].
395  # (IC is excluded as a dependency of PX. It is already included in PY, hence
396  # it would be redundant to include it again.)
397  #
398  # Target PX should include source files and headers of [PX, IA, IB] as final
399  # build metadata.
400  for dep in transitive_deps:
401  if dep not in exclude_deps and dep not in transitive_public_deps:
402  if dep in bazel_rules:
403  collapsed_srcs.update(_extract_sources(bazel_rules[dep]))
404  collapsed_public_headers.update(
405  _extract_public_headers(bazel_rules[dep]))
406  collapsed_headers.update(
407  _extract_nonpublic_headers(bazel_rules[dep]))
408  # This item is a "visited" flag
409  bazel_rule['_PROCESSING_DONE'] = True
410  # The following items are described in the docstring.
411  bazel_rule['_TRANSITIVE_DEPS'] = list(sorted(transitive_deps))
412  bazel_rule['_COLLAPSED_DEPS'] = list(sorted(collapsed_deps))
413  bazel_rule['_COLLAPSED_SRCS'] = list(sorted(collapsed_srcs))
414  bazel_rule['_COLLAPSED_PUBLIC_HEADERS'] = list(
415  sorted(collapsed_public_headers))
416  bazel_rule['_COLLAPSED_HEADERS'] = list(sorted(collapsed_headers))
417  bazel_rule['_EXCLUDE_DEPS'] = list(sorted(exclude_deps))
418 
419 
420 # TODO(jtattermusch): deduplicate with transitive_dependencies.py (which has a
421 # slightly different logic)
422 # TODO(jtattermusch): This is done to avoid introducing too many intermediate
423 # libraries into the build.yaml-based builds (which might cause issues
424 # building language-specific artifacts) and also because the libraries in
425 # the build.yaml-based build are generally considered units of distribution (=
426 # public libraries that are visible to the user and are installable), while in
427 # bazel builds it is customary to define a larger number of smaller
428 # "sublibraries". The need for elision (and expansion) of intermediate libraries
429 # can be re-evaluated in the future.
430 def _populate_transitive_metadata(bazel_rules: Any,
431  public_dep_names: Iterable[str]) -> None:
432  """Add 'transitive_deps' field for each of the rules"""
433  # Create the map between Bazel label and public dependency name
434  bazel_label_to_dep_name = {}
435  for dep_name in public_dep_names:
436  bazel_label_to_dep_name[_get_bazel_label(dep_name)] = dep_name
437 
438  # Make sure we reached all the Bazel rules
439  # TODO(lidiz) potentially we could only update a subset of rules
440  for rule_name in bazel_rules:
441  if '_PROCESSING_DONE' not in bazel_rules[rule_name]:
442  _compute_transitive_metadata(rule_name, bazel_rules,
443  bazel_label_to_dep_name)
444 
445 
446 def update_test_metadata_with_transitive_metadata(
447  all_extra_metadata: BuildDict, bazel_rules: BuildDict) -> None:
448  """Patches test build metadata with transitive metadata."""
449  for lib_name, lib_dict in list(all_extra_metadata.items()):
450  # Skip if it isn't a test
451  if lib_dict.get('build') != 'test' or lib_dict.get('_TYPE') != 'target':
452  continue
453 
454  bazel_rule = bazel_rules[_get_bazel_label(lib_name)]
455 
456  if '//external:benchmark' in bazel_rule['_TRANSITIVE_DEPS']:
457  lib_dict['benchmark'] = True
458  lib_dict['defaults'] = 'benchmark'
459 
460  if '//external:gtest' in bazel_rule['_TRANSITIVE_DEPS']:
461  lib_dict['gtest'] = True
462  lib_dict['language'] = 'c++'
463 
464 
465 def _get_transitive_protos(bazel_rules, t):
466  que = [
467  t,
468  ]
469  visited = set()
470  ret = []
471  while que:
472  name = que.pop(0)
473  rule = bazel_rules.get(name, None)
474  if rule:
475  for dep in rule['deps']:
476  if dep not in visited:
477  visited.add(dep)
478  que.append(dep)
479  for src in rule['srcs']:
480  if src.endswith('.proto'):
481  ret.append(src)
482  return list(set(ret))
483 
484 
485 def _expand_upb_proto_library_rules(bazel_rules):
486  # Expand the .proto files from UPB proto library rules into the pre-generated
487  # upb.h and upb.c files.
488  GEN_UPB_ROOT = '//:src/core/ext/upb-generated/'
489  GEN_UPBDEFS_ROOT = '//:src/core/ext/upbdefs-generated/'
490  EXTERNAL_LINKS = [('@com_google_protobuf//', ':src/'),
491  ('@com_google_googleapis//', ''),
492  ('@com_github_cncf_udpa//', ''),
493  ('@com_envoyproxy_protoc_gen_validate//', ''),
494  ('@envoy_api//', ''), ('@opencensus_proto//', '')]
495  for name, bazel_rule in bazel_rules.items():
496  gen_func = bazel_rule.get('generator_function', None)
497  if gen_func in ('grpc_upb_proto_library',
498  'grpc_upb_proto_reflection_library'):
499  # get proto dependency
500  deps = bazel_rule['deps']
501  if len(deps) != 1:
502  raise Exception(
503  'upb rule "{0}" should have 1 proto dependency but has "{1}"'
504  .format(name, deps))
505  # deps is not properly fetched from bazel query for upb_proto_library target
506  # so add the upb dependency manually
507  bazel_rule['deps'] = [
508  '//external:upb_lib', '//external:upb_lib_descriptor',
509  '//external:upb_generated_code_support__only_for_generated_code_do_not_use__i_give_permission_to_break_me'
510  ]
511  # populate the upb_proto_library rule with pre-generated upb headers
512  # and sources using proto_rule
513  protos = _get_transitive_protos(bazel_rules, deps[0])
514  if len(protos) == 0:
515  raise Exception(
516  'upb rule "{0}" should have at least one proto file.'.
517  format(name))
518  srcs = []
519  hdrs = []
520  for proto_src in protos:
521  for external_link in EXTERNAL_LINKS:
522  if proto_src.startswith(external_link[0]):
523  proto_src = proto_src[len(external_link[0]) +
524  len(external_link[1]):]
525  break
526  if proto_src.startswith('@'):
527  raise Exception('"{0}" is unknown workspace.'.format(name))
528  proto_src = _extract_source_file_path(proto_src)
529  ext = '.upb' if gen_func == 'grpc_upb_proto_library' else '.upbdefs'
530  root = GEN_UPB_ROOT if gen_func == 'grpc_upb_proto_library' else GEN_UPBDEFS_ROOT
531  srcs.append(root + proto_src.replace('.proto', ext + '.c'))
532  hdrs.append(root + proto_src.replace('.proto', ext + '.h'))
533  bazel_rule['srcs'] = srcs
534  bazel_rule['hdrs'] = hdrs
535 
536 
537 def _generate_build_metadata(build_extra_metadata: BuildDict,
538  bazel_rules: BuildDict) -> BuildDict:
539  """Generate build metadata in build.yaml-like format from bazel build metadata and build.yaml-specific "extra metadata"."""
540  lib_names = list(build_extra_metadata.keys())
541  result = {}
542 
543  for lib_name in lib_names:
544  lib_dict = _create_target_from_bazel_rule(lib_name, bazel_rules)
545 
546  # populate extra properties from the build.yaml-specific "extra metadata"
547  lib_dict.update(build_extra_metadata.get(lib_name, {}))
548 
549  # store to results
550  result[lib_name] = lib_dict
551 
552  # Rename targets marked with "_RENAME" extra metadata.
553  # This is mostly a cosmetic change to ensure that we end up with build.yaml target
554  # names we're used to from the past (and also to avoid overly long target names).
555  # The rename step needs to happen after we're done with most of the processing logic,
556  # otherwise the already-renamed libraries would end up with different names than expected
557  for lib_name in lib_names:
558  to_name = build_extra_metadata.get(lib_name, {}).get('_RENAME', None)
559  if to_name:
560  # store lib under the new name and also change its 'name' property
561  if to_name in result:
562  raise Exception('Cannot rename target ' + str(lib_name) + ', ' +
563  str(to_name) + ' already exists.')
564  lib_dict = result.pop(lib_name)
565  lib_dict['name'] = to_name
566  result[to_name] = lib_dict
567 
568  # dep names need to be updated as well
569  for lib_dict_to_update in list(result.values()):
570  lib_dict_to_update['deps'] = list([
571  to_name if dep == lib_name else dep
572  for dep in lib_dict_to_update['deps']
573  ])
574 
575  return result
576 
577 
578 def _convert_to_build_yaml_like(lib_dict: BuildMetadata) -> BuildYaml:
579  lib_names = [
580  lib_name for lib_name in list(lib_dict.keys())
581  if lib_dict[lib_name].get('_TYPE', 'library') == 'library'
582  ]
583  target_names = [
584  lib_name for lib_name in list(lib_dict.keys())
585  if lib_dict[lib_name].get('_TYPE', 'library') == 'target'
586  ]
587  test_names = [
588  lib_name for lib_name in list(lib_dict.keys())
589  if lib_dict[lib_name].get('_TYPE', 'library') == 'test'
590  ]
591 
592  # list libraries and targets in predefined order
593  lib_list = [lib_dict[lib_name] for lib_name in lib_names]
594  target_list = [lib_dict[lib_name] for lib_name in target_names]
595  test_list = [lib_dict[lib_name] for lib_name in test_names]
596 
597  # get rid of temporary private fields prefixed with "_" and some other useless fields
598  for lib in lib_list:
599  for field_to_remove in [
600  k for k in list(lib.keys()) if k.startswith('_')
601  ]:
602  lib.pop(field_to_remove, None)
603  for target in target_list:
604  for field_to_remove in [
605  k for k in list(target.keys()) if k.startswith('_')
606  ]:
607  target.pop(field_to_remove, None)
608  target.pop('public_headers',
609  None) # public headers make no sense for targets
610  for test in test_list:
611  for field_to_remove in [
612  k for k in list(test.keys()) if k.startswith('_')
613  ]:
614  test.pop(field_to_remove, None)
615  test.pop('public_headers',
616  None) # public headers make no sense for tests
617 
618  build_yaml_like = {
619  'libs': lib_list,
620  'filegroups': [],
621  'targets': target_list,
622  'tests': test_list,
623  }
624  return build_yaml_like
625 
626 
627 def _extract_cc_tests(bazel_rules: BuildDict) -> List[str]:
628  """Gets list of cc_test tests from bazel rules"""
629  result = []
630  for bazel_rule in list(bazel_rules.values()):
631  if bazel_rule['class'] == 'cc_test':
632  test_name = bazel_rule['name']
633  if test_name.startswith('//'):
634  prefixlen = len('//')
635  result.append(test_name[prefixlen:])
636  return list(sorted(result))
637 
638 
639 def _exclude_unwanted_cc_tests(tests: List[str]) -> List[str]:
640  """Filters out bazel tests that we don't want to run with other build systems or that we cannot reasonably build"""
641 
642  # most qps tests are autogenerated, we are fine without them
643  tests = [test for test in tests if not test.startswith('test/cpp/qps:')]
644  # microbenchmarks aren't needed for checking correctness
645  tests = [
646  test for test in tests
647  if not test.startswith('test/cpp/microbenchmarks:')
648  ]
649  tests = [
650  test for test in tests
651  if not test.startswith('test/core/promise/benchmark:')
652  ]
653 
654  # we have trouble with census dependency outside of bazel
655  tests = [
656  test for test in tests
657  if not test.startswith('test/cpp/ext/filters/census:') and
658  not test.startswith('test/core/xds:xds_channel_stack_modifier_test')
659  ]
660 
661  # missing opencensus/stats/stats.h
662  tests = [
663  test for test in tests if not test.startswith(
664  'test/cpp/end2end:server_load_reporting_end2end_test')
665  ]
666  tests = [
667  test for test in tests if not test.startswith(
668  'test/cpp/server/load_reporter:lb_load_reporter_test')
669  ]
670 
671  # The test uses the --running_under_bazel cmdline argument.
672  # To avoid the trouble of adjusting it, we just skip the test
673  tests = [
674  test for test in tests if not test.startswith(
675  'test/cpp/naming:resolver_component_tests_runner_invoker')
676  ]
677 
678  # the test requires 'client_crash_test_server' to be built
679  tests = [
680  test for test in tests
681  if not test.startswith('test/cpp/end2end:time_change_test')
682  ]
683 
684  # the test requires 'client_crash_test_server' to be built
685  tests = [
686  test for test in tests
687  if not test.startswith('test/cpp/end2end:client_crash_test')
688  ]
689 
690  # the test requires 'server_crash_test_client' to be built
691  tests = [
692  test for test in tests
693  if not test.startswith('test/cpp/end2end:server_crash_test')
694  ]
695 
696  # test never existed under build.yaml and it fails -> skip it
697  tests = [
698  test for test in tests
699  if not test.startswith('test/core/tsi:ssl_session_cache_test')
700  ]
701 
702  # the binary of this test does not get built with cmake
703  tests = [
704  test for test in tests
705  if not test.startswith('test/cpp/util:channelz_sampler_test')
706  ]
707 
708  # we don't need to generate fuzzers outside of bazel
709  tests = [test for test in tests if not test.endswith('_fuzzer')]
710 
711  return tests
712 
713 
714 def _generate_build_extra_metadata_for_tests(
715  tests: List[str], bazel_rules: BuildDict) -> BuildDict:
716  """For given tests, generate the "extra metadata" that we need for our "build.yaml"-like output. The extra metadata is generated from the bazel rule metadata by using a bunch of heuristics."""
717  test_metadata = {}
718  for test in tests:
719  test_dict = {'build': 'test', '_TYPE': 'target'}
720 
721  bazel_rule = bazel_rules[_get_bazel_label(test)]
722 
723  bazel_tags = bazel_rule['tags']
724  if 'manual' in bazel_tags:
725  # don't run the tests marked as "manual"
726  test_dict['run'] = False
727 
728  if bazel_rule['flaky']:
729  # don't run tests that are marked as "flaky" under bazel
730  # because that would only add noise for the run_tests.py tests
731  # and seeing more failures for tests that we already know are flaky
732  # doesn't really help anything
733  test_dict['run'] = False
734 
735  if 'no_uses_polling' in bazel_tags:
736  test_dict['uses_polling'] = False
737 
738  if 'grpc_fuzzer' == bazel_rule['generator_function']:
739  # currently we hand-list fuzzers instead of generating them automatically
740  # because there's no way to obtain maxlen property from bazel BUILD file.
741  print(('skipping fuzzer ' + test))
742  continue
743 
744  if 'bazel_only' in bazel_tags:
745  continue
746 
747  # if any tags that restrict platform compatibility are present,
748  # generate the "platforms" field accordingly
749  # TODO(jtattermusch): there is also a "no_linux" tag, but we cannot take
750  # it into account as it is applied by grpc_cc_test when poller expansion
751  # is made (for tests where uses_polling=True). So for now, we just
752  # assume all tests are compatible with linux and ignore the "no_linux" tag
753  # completely.
754  known_platform_tags = set(['no_windows', 'no_mac'])
755  if set(bazel_tags).intersection(known_platform_tags):
756  platforms = []
757  # assume all tests are compatible with linux and posix
758  platforms.append('linux')
759  platforms.append(
760  'posix') # there is no posix-specific tag in bazel BUILD
761  if not 'no_mac' in bazel_tags:
762  platforms.append('mac')
763  if not 'no_windows' in bazel_tags:
764  platforms.append('windows')
765  test_dict['platforms'] = platforms
766 
767  cmdline_args = bazel_rule['args']
768  if cmdline_args:
769  test_dict['args'] = list(cmdline_args)
770 
771  if test.startswith('test/cpp'):
772  test_dict['language'] = 'c++'
773 
774  elif test.startswith('test/core'):
775  test_dict['language'] = 'c'
776  else:
777  raise Exception('wrong test: ' + test)
778 
779  # short test name without the path.
780  # There can be name collisions, but we will resolve them later
781  simple_test_name = os.path.basename(_extract_source_file_path(test))
782  test_dict['_RENAME'] = simple_test_name
783 
784  test_metadata[test] = test_dict
785 
786  # detect duplicate test names
787  tests_by_simple_name = {}
788  for test_name, test_dict in list(test_metadata.items()):
789  simple_test_name = test_dict['_RENAME']
790  if not simple_test_name in tests_by_simple_name:
791  tests_by_simple_name[simple_test_name] = []
792  tests_by_simple_name[simple_test_name].append(test_name)
793 
794  # choose alternative names for tests with a name collision
795  for collision_list in list(tests_by_simple_name.values()):
796  if len(collision_list) > 1:
797  for test_name in collision_list:
798  long_name = test_name.replace('/', '_').replace(':', '_')
799  print((
800  'short name of "%s" collides with another test, renaming to %s'
801  % (test_name, long_name)))
802  test_metadata[test_name]['_RENAME'] = long_name
803 
804  return test_metadata
805 
806 
807 def _parse_http_archives(xml_tree: ET.Element) -> 'List[ExternalProtoLibrary]':
808  """Parse Bazel http_archive rules into ExternalProtoLibrary objects."""
809  result = []
810  for xml_http_archive in xml_tree:
811  if xml_http_archive.tag != 'rule' or xml_http_archive.attrib[
812  'class'] != 'http_archive':
813  continue
814  # A distilled Python representation of Bazel http_archive
815  http_archive = dict()
816  for xml_node in xml_http_archive:
817  if xml_node.attrib['name'] == 'name':
818  http_archive["name"] = xml_node.attrib['value']
819  if xml_node.attrib['name'] == 'urls':
820  http_archive["urls"] = []
821  for url_node in xml_node:
822  http_archive["urls"].append(url_node.attrib['value'])
823  if xml_node.attrib['name'] == 'url':
824  http_archive["urls"] = [xml_node.attrib['value']]
825  if xml_node.attrib['name'] == 'sha256':
826  http_archive["hash"] = xml_node.attrib['value']
827  if xml_node.attrib['name'] == 'strip_prefix':
828  http_archive["strip_prefix"] = xml_node.attrib['value']
829  if http_archive["name"] not in EXTERNAL_PROTO_LIBRARIES:
830  # If this http archive is not one of the external proto libraries,
831  # we don't want to include it as a CMake target
832  continue
833  lib = EXTERNAL_PROTO_LIBRARIES[http_archive["name"]]
834  lib.urls = http_archive["urls"]
835  lib.hash = http_archive["hash"]
836  lib.strip_prefix = http_archive["strip_prefix"]
837  result.append(lib)
838  return result
839 
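# For reference, a simplified sketch of an http_archive rule as it appears in
# the "bazel query --output xml" output parsed by _parse_http_archives above
# (an abbreviated approximation; attribute values are placeholders):
#
#   <rule class="http_archive" name="//external:envoy_api">
#     <string name="name" value="envoy_api"/>
#     <list name="urls"> <string value="https://github.com/.../....tar.gz"/> </list>
#     <string name="sha256" value="..."/>
#     <string name="strip_prefix" value="..."/>
#   </rule>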
840 
841 def _generate_external_proto_libraries() -> List[Dict[str, Any]]:
842  """Generates the build metadata for external proto libraries"""
843  xml_tree = _bazel_query_xml_tree('kind(http_archive, //external:*)')
844  libraries = _parse_http_archives(xml_tree)
845  libraries.sort(key=lambda x: x.destination)
846  return list(map(lambda x: x.__dict__, libraries))
847 
848 
849 def _detect_and_print_issues(build_yaml_like: BuildYaml) -> None:
850  """Try detecting some unusual situations and warn about them."""
851  for tgt in build_yaml_like['targets']:
852  if tgt['build'] == 'test':
853  for src in tgt['src']:
854  if src.startswith('src/') and not src.endswith('.proto'):
855  print(('source file from under "src/" tree used in test ' +
856  tgt['name'] + ': ' + src))
857 
858 
859 # extra metadata that will be used to construct build.yaml
860 # these are mostly extra properties that we weren't able to obtain from the bazel build
861 # _TYPE: whether this is a library, target or test
862 # _RENAME: whether this target should be renamed to a different name (to match expectations of make and cmake builds)
863 _BUILD_EXTRA_METADATA = {
864  'third_party/address_sorting:address_sorting': {
865  'language': 'c',
866  'build': 'all',
867  '_RENAME': 'address_sorting'
868  },
869  'gpr': {
870  'language': 'c',
871  'build': 'all',
872  },
873  'grpc': {
874  'language': 'c',
875  'build': 'all',
876  'baselib': True,
877  'generate_plugin_registry': True
878  },
879  'grpc++': {
880  'language': 'c++',
881  'build': 'all',
882  'baselib': True,
883  },
884  'grpc++_alts': {
885  'language': 'c++',
886  'build': 'all',
887  'baselib': True
888  },
889  'grpc++_error_details': {
890  'language': 'c++',
891  'build': 'all'
892  },
893  'grpc++_reflection': {
894  'language': 'c++',
895  'build': 'all'
896  },
897  'grpc++_unsecure': {
898  'language': 'c++',
899  'build': 'all',
900  'baselib': True,
901  },
902  'grpc_unsecure': {
903  'language': 'c',
904  'build': 'all',
905  'baselib': True,
906  'generate_plugin_registry': True
907  },
908  'grpcpp_channelz': {
909  'language': 'c++',
910  'build': 'all'
911  },
912  'grpc++_test': {
913  'language': 'c++',
914  'build': 'private',
915  },
916  'src/compiler:grpc_plugin_support': {
917  'language': 'c++',
918  'build': 'protoc',
919  '_RENAME': 'grpc_plugin_support'
920  },
921  'src/compiler:grpc_cpp_plugin': {
922  'language': 'c++',
923  'build': 'protoc',
924  '_TYPE': 'target',
925  '_RENAME': 'grpc_cpp_plugin'
926  },
927  'src/compiler:grpc_csharp_plugin': {
928  'language': 'c++',
929  'build': 'protoc',
930  '_TYPE': 'target',
931  '_RENAME': 'grpc_csharp_plugin'
932  },
933  'src/compiler:grpc_node_plugin': {
934  'language': 'c++',
935  'build': 'protoc',
936  '_TYPE': 'target',
937  '_RENAME': 'grpc_node_plugin'
938  },
939  'src/compiler:grpc_objective_c_plugin': {
940  'language': 'c++',
941  'build': 'protoc',
942  '_TYPE': 'target',
943  '_RENAME': 'grpc_objective_c_plugin'
944  },
945  'src/compiler:grpc_php_plugin': {
946  'language': 'c++',
947  'build': 'protoc',
948  '_TYPE': 'target',
949  '_RENAME': 'grpc_php_plugin'
950  },
951  'src/compiler:grpc_python_plugin': {
952  'language': 'c++',
953  'build': 'protoc',
954  '_TYPE': 'target',
955  '_RENAME': 'grpc_python_plugin'
956  },
957  'src/compiler:grpc_ruby_plugin': {
958  'language': 'c++',
959  'build': 'protoc',
960  '_TYPE': 'target',
961  '_RENAME': 'grpc_ruby_plugin'
962  },
963 
964  # TODO(jtattermusch): consider adding grpc++_core_stats
965 
966  # test support libraries
967  'test/core/util:grpc_test_util': {
968  'language': 'c',
969  'build': 'private',
970  '_RENAME': 'grpc_test_util'
971  },
972  'test/core/util:grpc_test_util_unsecure': {
973  'language': 'c',
974  'build': 'private',
975  '_RENAME': 'grpc_test_util_unsecure'
976  },
977  # TODO(jtattermusch): consider adding grpc++_test_util_unsecure - it doesn't seem to be used by bazel build (don't forget to set secure: False)
978  'test/cpp/util:test_config': {
979  'language': 'c++',
980  'build': 'private',
981  '_RENAME': 'grpc++_test_config'
982  },
983  'test/cpp/util:test_util': {
984  'language': 'c++',
985  'build': 'private',
986  '_RENAME': 'grpc++_test_util'
987  },
988 
989  # end2end test support libraries
990  'test/core/end2end:end2end_tests': {
991  'language': 'c',
992  'build': 'private',
993  '_RENAME': 'end2end_tests'
994  },
995 
996  # benchmark support libraries
997  'test/cpp/microbenchmarks:helpers': {
998  'language': 'c++',
999  'build': 'test',
1000  'defaults': 'benchmark',
1001  '_RENAME': 'benchmark_helpers'
1002  },
1003  'test/cpp/interop:interop_client': {
1004  'language': 'c++',
1005  'build': 'test',
1006  'run': False,
1007  '_TYPE': 'target',
1008  '_RENAME': 'interop_client'
1009  },
1010  'test/cpp/interop:interop_server': {
1011  'language': 'c++',
1012  'build': 'test',
1013  'run': False,
1014  '_TYPE': 'target',
1015  '_RENAME': 'interop_server'
1016  },
1017  'test/cpp/interop:xds_interop_client': {
1018  'language': 'c++',
1019  'build': 'test',
1020  'run': False,
1021  '_TYPE': 'target',
1022  '_RENAME': 'xds_interop_client'
1023  },
1024  'test/cpp/interop:xds_interop_server': {
1025  'language': 'c++',
1026  'build': 'test',
1027  'run': False,
1028  '_TYPE': 'target',
1029  '_RENAME': 'xds_interop_server'
1030  },
1031  'test/cpp/interop:http2_client': {
1032  'language': 'c++',
1033  'build': 'test',
1034  'run': False,
1035  '_TYPE': 'target',
1036  '_RENAME': 'http2_client'
1037  },
1038  'test/cpp/qps:qps_json_driver': {
1039  'language': 'c++',
1040  'build': 'test',
1041  'run': False,
1042  '_TYPE': 'target',
1043  '_RENAME': 'qps_json_driver'
1044  },
1045  'test/cpp/qps:qps_worker': {
1046  'language': 'c++',
1047  'build': 'test',
1048  'run': False,
1049  '_TYPE': 'target',
1050  '_RENAME': 'qps_worker'
1051  },
1052  'test/cpp/util:grpc_cli': {
1053  'language': 'c++',
1054  'build': 'test',
1055  'run': False,
1056  '_TYPE': 'target',
1057  '_RENAME': 'grpc_cli'
1058  },
1059 
1060  # TODO(jtattermusch): create_jwt and verify_jwt break distribtests because they depend on grpc_test_utils and thus require tests to be built
1061  # For now it's ok to disable them as these binaries aren't very useful anyway.
1062  #'test/core/security:create_jwt': { 'language': 'c', 'build': 'tool', '_TYPE': 'target', '_RENAME': 'grpc_create_jwt' },
1063  #'test/core/security:verify_jwt': { 'language': 'c', 'build': 'tool', '_TYPE': 'target', '_RENAME': 'grpc_verify_jwt' },
1064 
1065  # TODO(jtattermusch): add remaining tools such as grpc_print_google_default_creds_token (they are not used by bazel build)
1066 
1067  # TODO(jtattermusch): these fuzzers had no build.yaml equivalent
1068  # test/core/compression:message_compress_fuzzer
1069  # test/core/compression:message_decompress_fuzzer
1070  # test/core/compression:stream_compression_fuzzer
1071  # test/core/compression:stream_decompression_fuzzer
1072  # test/core/slice:b64_decode_fuzzer
1073  # test/core/slice:b64_encode_fuzzer
1074 }
1075 
1076 # We need a complete picture of all the targets and dependencies we're interested in
1077 # so we run multiple bazel queries and merge the results.
1078 _BAZEL_DEPS_QUERIES = [
1079  'deps("//test/...")',
1080  'deps("//:all")',
1081  'deps("//src/compiler/...")',
1082  'deps("//src/proto/...")',
1083  # The ^ is needed to differentiate proto_library from go_proto_library
1084  'deps(kind("^proto_library", @envoy_api//envoy/...))',
1085 ]
1086 
1087 # Step 1: run a bunch of "bazel query --output xml" queries to collect
1088 # the raw build metadata from the bazel build.
1089 # At the end of this step we will have a dictionary of bazel rules
1090 # that are interesting to us (libraries, binaries, etc.) along
1091 # with their most important metadata (sources, headers, dependencies)
1092 #
1093 # Example of a single bazel rule after being populated:
1094 # '//:grpc' : { 'class': 'cc_library',
1095 # 'hdrs': ['//:include/grpc/byte_buffer.h', ... ],
1096 # 'srcs': ['//:src/core/lib/surface/init.cc', ... ],
1097 # 'deps': ['//:grpc_common', ...],
1098 # ... }
1099 bazel_rules = {}
1100 for query in _BAZEL_DEPS_QUERIES:
1101  bazel_rules.update(
1102  _extract_rules_from_bazel_xml(_bazel_query_xml_tree(query)))
1103 
1104 # Step 1.5: The sources for UPB protos are pre-generated, so we want
1105 # to expand the UPB proto library bazel rules into the generated
1106 # .upb.h and .upb.c files.
1107 _expand_upb_proto_library_rules(bazel_rules)
1108 
1109 # Step 2: Extract the known bazel cc_test tests. While most tests
1110 # will be buildable with other build systems just fine, some of these tests
1111 # would be too difficult to build and run with other build systems,
1112 # so we simply exclude the ones we don't want.
1113 # Note that while making tests buildable with other build systems
1114 # than just bazel is extra effort, we still need to do that for these
1115 # reasons:
1116 # - If our cmake build doesn't have any tests at all, it's hard to make
1117 # sure that what it built actually works (we need at least some "smoke tests").
1118 # This is quite important because the build flags between bazel / non-bazel builds might differ
1119 # (sometimes it's for interesting reasons that are not easy to overcome)
1120 # which makes it even more important to have at least some tests for cmake/make
1121 # - Our portability suite actually runs cmake tests and migration of portability
1122 # suite fully towards bazel might be intricate (e.g. it's unclear whether it's
1123 # possible to get a good enough coverage of different compilers / distros etc.
1124 # with bazel)
1125 # - some things that are considered "tests" in build.yaml-based builds are actually binaries
1126 # we'd want to be able to build anyway (qps_json_driver, interop_client, interop_server, grpc_cli)
1127 # so it's unclear how much make/cmake simplification we would gain by removing just some (but not all) tests
1128 # TODO(jtattermusch): Investigate feasibility of running portability suite with bazel.
1129 tests = _exclude_unwanted_cc_tests(_extract_cc_tests(bazel_rules))
1130 
1131 # Step 3: Generate the "extra metadata" for all our build targets.
1132 # While the bazel rules give us most of the information we need,
1133 # the legacy "build.yaml" format requires some additional fields that
1134 # we cannot get just from bazel alone (we call that "extra metadata").
1135 # In this step, we basically analyze the build metadata we have from bazel
1136 # and use heuristics to determine (and sometimes guess) the right
1137 # extra metadata to use for each target.
1138 #
1139 # - For some targets (such as the public libraries, helper libraries
1140 # and executables) determining the right extra metadata is hard to do
1141 # automatically. For these targets, the extra metadata is supplied "manually"
1142 # in form of the _BUILD_EXTRA_METADATA dictionary. That allows us to match
1143 # the semantics of the legacy "build.yaml" as closely as possible.
1144 #
1145 # - For test binaries, it is possible to generate the "extra metadata" mostly
1146 # automatically using a rule-based heuristic approach because most tests
1147 # look and behave alike from the build's perspective.
1148 #
1149 # TODO(jtattermusch): Of course neither "_BUILD_EXTRA_METADATA" nor
1150 # the heuristic approach used for tests is ideal and they cannot be made
1151 # to cover all possible situations (and are tailored to work with the way
1152 # the grpc build currently works), but the idea was to start with something
1153 # reasonably simple that matches the "build.yaml"-like semantics as closely
1154 # as possible (to avoid changing too many things at once) and gradually get
1155 # rid of the legacy "build.yaml"-specific fields one by one. Once that is done,
1156 # only very little "extra metadata" would be needed and/or it would be trivial
1157 # to generate it automatically.
1158 all_extra_metadata = {}
1159 all_extra_metadata.update(_BUILD_EXTRA_METADATA)
1160 all_extra_metadata.update(
1161  _generate_build_extra_metadata_for_tests(tests, bazel_rules))
1162 
1163 # Step 4: Compute the build metadata that will be used in the final build.yaml.
1164 # The final build metadata includes transitive dependencies, and sources/headers
1165 # expanded without intermediate dependencies.
1166 # Example:
1167 # '//:grpc' : { ...,
1168 # '_TRANSITIVE_DEPS': ['//:gpr_base', ...],
1169 # '_COLLAPSED_DEPS': ['gpr', ...],
1170 # '_COLLAPSED_SRCS': [...],
1171 # '_COLLAPSED_PUBLIC_HEADERS': [...],
1172 # '_COLLAPSED_HEADERS': [...]
1173 # }
1174 _populate_transitive_metadata(bazel_rules, list(all_extra_metadata.keys()))
1175 
1176 # Step 4a: Update the existing test metadata with the updated build metadata.
1177 # Certain build metadata of certain test targets depend on the transitive
1178 # metadata that wasn't available earlier.
1179 update_test_metadata_with_transitive_metadata(all_extra_metadata, bazel_rules)
1180 
1181 # Step 5: Generate the final metadata for all the targets.
1182 # This is done by combining the bazel build metadata and the "extra metadata"
1183 # we obtained in the previous step.
1184 # In this step, we also perform some interesting massaging of the target metadata
1185 # to end up with a result that is as similar to the legacy build.yaml data
1186 # as possible.
1187 # - Some targets get renamed (to match the legacy build.yaml target names)
1188 # - Some intermediate libraries get elided ("expanded") to better match the set
1189 # of targets provided by the legacy build.yaml build
1190 #
1191 # Originally the target renaming was introduced to address these concerns:
1192 # - avoid changing too many things at the same time and avoid people getting
1193 # confused by some well-known targets suddenly being missing
1194 # - Makefile/cmake and also language-specific generators rely on some build
1195 # targets being called exactly the way they are. Some of our testing
1196 # scripts also invoke executables (e.g. "qps_json_driver") by their name.
1197 # - The autogenerated test name from bazel includes the package path
1198 # (e.g. "test_cpp_TEST_NAME"). Without renaming, the target names would
1199 # end up pretty ugly (e.g. test_cpp_qps_qps_json_driver).
1200 # TODO(jtattermusch): reevaluate the need for target renaming in the future.
1201 #
1202 # Example of a single generated target:
1203 # 'grpc' : { 'language': 'c',
1204 # 'public_headers': ['include/grpc/byte_buffer.h', ... ],
1205 # 'headers': ['src/core/ext/filters/client_channel/client_channel.h', ... ],
1206 # 'src': ['src/core/lib/surface/init.cc', ... ],
1207 # 'deps': ['gpr', 'address_sorting', ...],
1208 # ... }
1209 all_targets_dict = _generate_build_metadata(all_extra_metadata, bazel_rules)
1210 
1211 # Step 6: convert the dictionary with all the targets to a dict that has
1212 # the desired "build.yaml"-like layout.
1213 # TODO(jtattermusch): We use the custom "build.yaml"-like layout because
1214 # currently all other build systems use that format as their source of truth.
1215 # In the future, we can get rid of this custom & legacy format entirely,
1216 # but we would need to update the generators for other build systems
1217 # at the same time.
1218 #
1219 # Layout of the result:
1220 # { 'libs': { TARGET_DICT_FOR_LIB_XYZ, ... },
1221 # 'targets': { TARGET_DICT_FOR_BIN_XYZ, ... },
1222 # 'tests': { TARGET_DICT_FOR_TEST_XYZ, ...} }
1223 build_yaml_like = _convert_to_build_yaml_like(all_targets_dict)
1224 
1225 # Step 7: generate build metadata for external ProtoBuf libraries.
1226 # We only want the ProtoBuf sources from these ProtoBuf dependencies, which may
1227 # not be present in our release source tarballs. These rules will be used by CMake
1228 # to download these libraries if they are not already present. Even if the download
1229 # fails, it will be a soft error that doesn't block existing targets from being
1230 # successfully built.
1231 build_yaml_like[
1232  'external_proto_libraries'] = _generate_external_proto_libraries()
1233 
1234 # detect and report some suspicious situations we've seen before
1235 _detect_and_print_issues(build_yaml_like)
1236 
1237 # Step 8: Store the build_autogenerated.yaml in a deterministic (=sorted)
1238 # and cleaned-up form.
1239 # A basic overview of the resulting "build.yaml"-like format is here:
1240 # https://github.com/grpc/grpc/blob/master/templates/README.md
1241 # TODO(jtattermusch): The "cleanup" function is taken from the legacy
1242 # build system (which used build.yaml) and can be eventually removed.
1243 build_yaml_string = build_cleaner.cleaned_build_yaml_dict_as_string(
1244  build_yaml_like)
1245 with open('build_autogenerated.yaml', 'w') as file:
1246  file.write(build_yaml_string)