Merge changes I29aa75d5,I848a020f,I7f0d82e3
* changes:
  vndk-def: Add libtextclassifier_hash to LL-NDK-Indirect
  sourcedr: Add module path filters
  sourcedr: Add 2 commands to check source file deps
@@ -184,6 +184,7 @@ def main():
         'ld-android',
         'libc_malloc_debug',
         'libnetd_client',
+        'libtextclassifier_hash',
     ]
     for name in libs:
         update_tag('/system/${LIB}/' + name + '.so', 'LL-NDK-Private')
@@ -133,7 +133,7 @@ class LexerError(ValueError):
 class Lexer(object):
     """Lexer to tokenize the input string."""
 
-    def __init__(self, buf, offset=0):
+    def __init__(self, buf, offset=0, path=None):
        """Tokenize the source code in buf starting from offset.

        Args:
@@ -147,6 +147,7 @@ class Lexer(object):
         self.end = offset
         self.token = None
         self.literal = None
+        self.path = path
 
         self._next()
 
@@ -623,6 +624,7 @@ class Parser(object):
     def parse_module_definition(self, module_ident):
         """Parse a module definition."""
         properties = self.parse_dict()
+        properties['_path'] = String(self.lexer.path)
         self.modules.append((module_ident, properties))
 
 
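
With the Lexer now carrying a path, every parsed module exposes a '_path' property. A minimal sketch of reading it back (the blueprint snippet and the path value are illustrative, not part of this change):

    from blueprint import Lexer, Parser

    content = '''
    cc_library {
        name: "libfoo",
    }
    '''
    parser = Parser(Lexer(content, path='external/foo/Android.bp'))
    parser.parse()

    for module_ident, properties in parser.modules:
        # '_path' records the blueprint file the module was parsed from.
        print(properties['name'], properties['_path'])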
@@ -750,7 +752,13 @@ class RecursiveParser(object):
     """This is a recursive parser which will parse blueprint files
     recursively."""
 
+
+    # Default Blueprint file name
+    _DEFAULT_SUB_NAME = 'Android.bp'
+
+
     def __init__(self):
+        """Initialize a recursive parser."""
         self.visited = set()
         self.modules = []
 
@@ -772,7 +780,7 @@ class RecursiveParser(object):
 
     @classmethod
     def find_sub_files_from_env(cls, rootdir, env, use_subdirs,
-                                default_sub_name='Android.bp'):
+                                default_sub_name=_DEFAULT_SUB_NAME):
         """Find the sub files from the names specified in build, subdirs, and
         optional_subdirs."""
 
@@ -800,7 +808,7 @@ class RecursiveParser(object):
         """Read a blueprint file and return modules and the environment."""
         with open(path, 'r') as bp_file:
             content = bp_file.read()
-        parser = Parser(Lexer(content), env)
+        parser = Parser(Lexer(content, path=path), env)
         parser.parse()
         return (parser.modules, parser.vars)
 
@@ -873,7 +881,8 @@ class RecursiveParser(object):
             pass
 
 
-    def parse_file(self, path, env=None, evaluate=True):
+    def parse_file(self, path, env=None, evaluate=True,
+                   default_sub_name=_DEFAULT_SUB_NAME):
         """Parse blueprint files recursively."""
 
         if env is None:
@@ -884,8 +893,8 @@ class RecursiveParser(object):
         if 'subdirs' in sub_env or 'optional_subdirs' in sub_env:
             self._parse_file_recursive(path, env, evaluate, True)
         else:
-            self._scan_and_parse_all_file_recursive('Android.bp', path, env,
-                                                    evaluate)
+            self._scan_and_parse_all_file_recursive(
+                default_sub_name, path, env, evaluate)
 
 
 #------------------------------------------------------------------------------
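
The new default_sub_name parameter lets the recursive parser walk blueprint files that are not named Android.bp, which is what the new unit test added later in this change relies on. A minimal sketch, mirroring that test (the root file path is illustrative):

    from blueprint import RecursiveParser

    parser = RecursiveParser()
    # Recurse over MockBuild.txt files instead of the default Android.bp.
    parser.parse_file('tests/testdata/example/MockBuild.txt',
                      default_sub_name='MockBuild.txt')

    for module_ident, properties in parser.modules:
        print(properties['name'], properties['_path'])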
@@ -21,6 +21,8 @@ from __future__ import print_function
 import argparse
 import csv
 import itertools
+import os
+import re
 import sys
 
 import vndk
@@ -32,10 +34,14 @@ def _parse_args():
     parser.add_argument('root_bp',
                         help='path to Android.bp in ANDROID_BUILD_TOP')
     parser.add_argument('-o', '--output', help='path to output file')
+    parser.add_argument('--exclude',
+                        help='regular expression for the excluded directories')
+    parser.add_argument('--select',
+                        help='regular expression for the selected directories')
     return parser.parse_args()
 
 
-def print_vndk_module_csv(output_file, module_dicts):
+def print_vndk_module_csv(output_file, module_dicts, root_dir, exclude, select):
     """Print vndk module list to output file."""
 
     all_libs = module_dicts.all_libs
@@ -46,10 +52,17 @@ def print_vndk_module_csv(output_file, module_dicts):
     module_names = sorted(set(
         itertools.chain(vndk_libs, vndk_sp_libs, vendor_available_libs)))
 
+    root_dir_prefix_len = len(root_dir) + 1
+
     writer = csv.writer(output_file, lineterminator='\n')
     writer.writerow(('name', 'vndk', 'vndk_sp', 'vendor_available', 'rule'))
     for name in module_names:
         rule = all_libs[name].rule
+        path = all_libs[name].get_property('_path')[root_dir_prefix_len:]
+        if select and not select.match(path):
+            continue
+        if exclude and exclude.match(path):
+            continue
         if '_header' not in rule and '_static' not in rule and \
                 rule != 'toolchain_library':
             writer.writerow((name,
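
Both filters are applied with re.match() against the module path relative to the source tree root (the '_path' property with the root directory prefix stripped), so the patterns are implicitly anchored at the start of the path. A small sketch of the check, with illustrative regular expressions and paths:

    import re

    select = re.compile('external/')          # keep only modules under external/
    exclude = re.compile('external/libcxx')   # ... but drop external/libcxx

    def keep(path):
        # path is the '_path' property with the root directory prefix removed.
        if select and not select.match(path):
            return False
        if exclude and exclude.match(path):
            return False
        return True

    print(keep('external/zlib/Android.bp'))    # True
    print(keep('external/libcxx/Android.bp'))  # False (matches --exclude)
    print(keep('frameworks/av/Android.bp'))    # False (does not match --select)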
@@ -64,13 +77,22 @@ def main():
 
     args = _parse_args()
 
+    # Convert select/exclude regular expressions
+    select = re.compile(args.select) if args.select else None
+    exclude = re.compile(args.exclude) if args.exclude else None
+
+    # Parse Blueprint files and get VNDK libs
     module_dicts = vndk.ModuleClassifier.create_from_root_bp(args.root_bp)
 
+    root_dir = os.path.dirname(args.root_bp)
+
     if args.output:
         with open(args.output, 'w') as output_file:
-            print_vndk_module_csv(output_file, module_dicts)
+            print_vndk_module_csv(
+                output_file, module_dicts, root_dir, exclude, select)
     else:
-        print_vndk_module_csv(sys.stdout, module_dicts)
+        print_vndk_module_csv(
+            sys.stdout, module_dicts, root_dir, exclude, select)
 
 
 if __name__ == '__main__':
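
A hedged command-line sketch of the new flags (the script name is a placeholder, since this hunk does not show the file name, and the regular expressions are illustrative):

    python3 list_vndk_module.py "${ANDROID_BUILD_TOP}/Android.bp" \
        -o vndk-modules.csv \
        --select '^(?:frameworks|system)/' \
        --exclude '^frameworks/base/'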
@@ -596,5 +596,11 @@ class LexerTest(unittest.TestCase):
         lexer.consume(Token.EOF)
 
 
+    def test_lexer_path(self):
+        """Test the path attribute of the Lexer object."""
+        lexer = Lexer('content', path='test_path')
+        self.assertEqual(lexer.path, 'test_path')
+
+
 if __name__ == '__main__':
     unittest.main()
@@ -0,0 +1,67 @@
+#!/usr/bin/env python3
+
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""This module contains the unit tests to check whether module paths are
+kept properly."""
+
+import os
+import unittest
+
+from blueprint import Lexer, Parser, RecursiveParser
+
+
+#------------------------------------------------------------------------------
+# Module Path
+#------------------------------------------------------------------------------
+
+class ModulePathTest(unittest.TestCase):
+    """Test cases for module path attribute."""
+
+    def test_module_path_from_lexer(self):
+        """Test whether the path is passed from Lexer to parsed modules."""
+        content = '''
+        cc_library {
+            name: "libfoo",
+        }
+        '''
+
+        parser = Parser(Lexer(content, path='test_path'))
+        parser.parse()
+
+        self.assertEqual('test_path', parser.modules[0][1]['_path'])
+
+
+    def test_module_path_functional(self):
+        SUBNAME = 'MockBuild.txt'
+
+        test_dir = os.path.join(
+            os.path.dirname(__file__), 'testdata', 'example')
+        test_root_file = os.path.join(test_dir, SUBNAME)
+
+        parser = RecursiveParser()
+        parser.parse_file(test_root_file, default_sub_name=SUBNAME)
+
+        named_mods = {module[1]['name']: module for module in parser.modules}
+
+        self.assertEqual(os.path.join(test_dir, 'foo', SUBNAME),
+                         named_mods['libfoo'][1]['_path'])
+        self.assertEqual(os.path.join(test_dir, 'bar', SUBNAME),
+                         named_mods['libbar'][1]['_path'])
+
+if __name__ == '__main__':
+    unittest.main()
vndk/tools/sourcedr/sourcedr/blueprint/tests/testdata/example/MockBuild.txt (new file, empty)
vndk/tools/sourcedr/sourcedr/blueprint/tests/testdata/example/bar/MockBuild.txt (new file, 3 lines)
@@ -0,0 +1,3 @@
+cc_library {
+    name: "libbar",
+}
vndk/tools/sourcedr/sourcedr/blueprint/tests/testdata/example/foo/MockBuild.txt (new file, 3 lines)
@@ -0,0 +1,3 @@
+cc_library {
+    name: "libfoo",
+}
vndk/tools/sourcedr/sourcedr/list_installed_module_under_source.py (new executable file, 78 lines)
@@ -0,0 +1,78 @@
+#!/usr/bin/env python3
+
+import argparse
+import itertools
+import json
+import posixpath
+import re
+
+
+def match_any(regex, iterable):
+    """Check whether any element in iterable matches regex."""
+    return any(regex.match(elem) for elem in iterable)
+
+
+class ModuleInfo(object):
+    def __init__(self, module_info_path):
+        with open(module_info_path, 'r') as module_info_file:
+            self._json = json.load(module_info_file)
+
+
+    def list(self, installed_filter=None, module_definition_filter=None):
+        for name, info in self._json.items():
+            installs = info['installed']
+            paths = info['path']
+
+            if installed_filter and not match_any(installed_filter, installs):
+                continue
+            if module_definition_filter and \
+                    not match_any(module_definition_filter, paths):
+                continue
+
+            for install, path in itertools.product(installs, paths):
+                yield (install, path)
+
+
+def _parse_args():
+    """Parse command line arguments"""
+
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument('module_info', help='Path to module-info.json')
+
+    parser.add_argument('--out-dir', default='out',
+                        help='Android build output directory')
+
+    parser.add_argument('--installed-filter',
+                        help='Installation filter (regular expression)')
+
+    parser.add_argument('--module-definition-filter',
+                        help='Module definition filter (regular expression)')
+
+    return parser.parse_args()
+
+
+def main():
+    """Main function"""
+
+    args = _parse_args()
+
+    installed_filter = None
+    if args.installed_filter:
+        installed_filter = re.compile(
+            re.escape(posixpath.normpath(args.out_dir)) + '/' +
+            '(?:' + args.installed_filter + ')')
+
+    module_definition_filter = None
+    if args.module_definition_filter:
+        module_definition_filter = re.compile(args.module_definition_filter)
+
+    module_info = ModuleInfo(args.module_info)
+
+    for installed_file, module_path in \
+            module_info.list(installed_filter, module_definition_filter):
+        print(installed_file, module_path)
+
+
+if __name__ == '__main__':
+    main()
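
A usage sketch for the new command (the module-info.json location and both regular expressions are illustrative; module-info.json is generated by the Android build under the output directory):

    python3 list_installed_module_under_source.py out/module-info.json \
        --out-dir out \
        --installed-filter 'target/product/[^/]+/system/lib(?:64)?/.*\.so' \
        --module-definition-filter '^(?:external|frameworks)/'

Each printed line pairs an installed file (an entry from the module's installed list, matched after the out directory prefix is prepended to the filter) with one of the directories that define the module.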
vndk/tools/sourcedr/sourcedr/list_source_file.py (new executable file, 139 lines)
@@ -0,0 +1,139 @@
+#!/usr/bin/env python3
+
+"""List all source files of an installed module."""
+
+import argparse
+import itertools
+import posixpath
+import re
+
+try:
+    import cPickle as pickle  # Python 2
+except ImportError:
+    import pickle  # Python 3
+
+import ninja
+
+
+def _parse_args():
+    """Parse the command line arguments."""
+
+    parser = argparse.ArgumentParser()
+
+    # Ninja input file options
+    parser.add_argument('input_file', help='input ninja file')
+    parser.add_argument('--ninja-deps', help='.ninja_deps file')
+    parser.add_argument('--cwd', help='working directory for ninja')
+    parser.add_argument('--encoding', default='utf-8',
+                        help='ninja file encoding')
+
+    # Options
+    parser.add_argument(
+        'installed_filter', nargs='+',
+        help='path filter for installed files (w.r.t. device root)')
+    parser.add_argument(
+        '--out-dir', default='out', help='path to output directory')
+
+    return parser.parse_args()
+
+
+def _load_manifest_from_args(args):
+    """Load the ninja file specified in the command line arguments."""
+
+    input_file = args.input_file
+
+    # If the input file name ends with `.pickle`, load it with pickle.load().
+    if input_file.endswith('.pickle'):
+        with open(input_file, 'rb') as pickle_file:
+            return pickle.load(pickle_file)
+
+    # Parse the ninja file
+    ninja_parser = ninja.Parser(args.cwd)
+    return ninja_parser.parse(input_file, args.encoding, args.ninja_deps)
+
+
+def collect_source_files(graph, start, out_dir_pattern, out_host_dir_pattern):
+    """Collect the transitive dependencies of a target."""
+
+    source_files = []
+
+    # Extract the file name of the target file.  We need this file name to
+    # allow the strip/copy build rules while leaving other shared libraries
+    # alone.
+    start_basename = posixpath.basename(start)
+
+    # Collect all source files
+    visited = {start}
+    stack = [start]
+    while stack:
+        cur = stack.pop()
+
+        if not out_dir_pattern.match(cur):
+            source_files.append(cur)
+
+        build = graph.get(cur)
+        if build:
+            for dep in itertools.chain(build.explicit_ins, build.implicit_ins,
+                                       build.depfile_implicit_ins):
+                # Skip the binaries for build process
+                if dep.startswith('prebuilts/'):
+                    continue
+                if out_host_dir_pattern.match(dep):
+                    continue
+
+                # Skip the shared libraries
+                if dep.endswith('.toc'):
+                    continue
+                if dep.endswith('.so'):
+                    if posixpath.basename(dep) != start_basename:
+                        continue
+
+                if dep not in visited:
+                    visited.add(dep)
+                    stack.append(dep)
+
+    return sorted(source_files)
+
+
+def main():
+    args = _parse_args()
+
+    out_dir = posixpath.normpath(args.out_dir)
+    out_dir_pattern = re.compile(re.escape(out_dir) + '/')
+    out_host_dir_pattern = re.compile(re.escape(out_dir) + '/host/')
+    out_product_dir = out_dir + '/target/product/[^/]+'
+
+    def _normalize_path(path):
+        if path.startswith(out_dir + '/target'):
+            return path
+        return posixpath.join(out_product_dir, path)
+
+    installed_filter = [_normalize_path(path) for path in args.installed_filter]
+    installed_filter = re.compile(
+        '|'.join('(?:' + p + ')' for p in installed_filter))
+
+    manifest = _load_manifest_from_args(args)
+
+    # Build lookup map
+    graph = {}
+    for build in manifest.builds:
+        for path in build.explicit_outs:
+            graph[path] = build
+        for path in build.implicit_outs:
+            graph[path] = build
+
+    # Collect all matching outputs
+    matched_files = [path for path in graph if installed_filter.match(path)]
+    matched_files.sort()
+
+    for path in matched_files:
+        source_files = collect_source_files(
+            graph, path, out_dir_pattern, out_host_dir_pattern)
+        print(path)
+        for dep in source_files:
+            print('\t' + dep)
+        print()
+
+
+if __name__ == '__main__':
+    main()
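
A usage sketch for list_source_file.py (the combined ninja file name, the .ninja_deps location, and the installed path pattern are all illustrative and depend on the build output and lunch target):

    python3 list_source_file.py out/combined-aosp_arm64.ninja \
        --ninja-deps out/.ninja_deps \
        --out-dir out \
        'system/lib64/libui\.so'

The positional filters are matched against installed paths relative to the device root; a pattern that does not already start with out/target is prefixed with out/target/product/[^/]+/ before matching, and every source file reachable from the matching build edges (excluding prebuilts, host outputs, .toc files, and other shared libraries) is printed under the matched output.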