from io import open
import os
import re
import glob
import subprocess
import sys
import unittest

brpath = os.path.normpath(os.path.join(os.path.dirname(__file__), ".."))

#
# Patch parsing functions
#

FIND_INFRA_IN_PATCH = re.compile(r"^\+\$\(eval \$\((host-)?([^-]*)-package\)\)$")


def analyze_patch(patch):
    """Parse one patch and return the set of files modified, added or
    removed by the patch, as well as the set of package infrastructures
    used by any packages the patch adds."""
    files = set()
    infras = set()
    for line in patch:
        # If the patch is adding a package, find which infra it is
        m = FIND_INFRA_IN_PATCH.match(line)
        if m:
            infras.add(m.group(2))
        if not line.startswith("+++ "):
            continue
        fname = line[line.find("/") + 1:].strip()
        if fname == "dev/null":
            continue
        files.add(fname)
    return (files, infras)

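# Illustrative example (the package name is hypothetical): for a patch
# containing the lines
#
#   +++ b/package/foo/foo.mk
#   +$(eval $(generic-package))
#
# analyze_patch() would return ({"package/foo/foo.mk"}, {"generic"}): the
# "+++ " header gives the file name, and FIND_INFRA_IN_PATCH spots the
# infrastructure of a newly added package.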

FIND_INFRA_IN_MK = re.compile(r"^\$\(eval \$\((host-)?([^-]*)-package\)\)$")


def fname_get_package_infra(fname):
    """Check whether the file name passed as argument is a Buildroot .mk
    file describing a package, and return the infrastructure it uses,
    or None otherwise."""
    if not fname.endswith(".mk"):
        return None

    if not os.path.exists(fname):
        return None

    with open(fname, "r") as f:
        for line in f:
            line = line.strip()
            m = FIND_INFRA_IN_MK.match(line)
            if m:
                return m.group(2)
    return None

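# Illustrative example (the package name is hypothetical): a file
# "package/foo/foo.mk" containing the line "$(eval $(autotools-package))"
# makes fname_get_package_infra() return "autotools".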

def analyze_patches(patches):
    """Parse a list of patches and return the set of files modified,
    added or removed by the patches, as well as the set of package
    infrastructures used by those patches (if any)."""
    allfiles = set()
    allinfras = set()
    for patch in patches:
        (files, infras) = analyze_patch(patch)
        allfiles = allfiles | files
        allinfras = allinfras | infras
    return (allfiles, allinfras)


#
# Unit-test parsing functions
#

def get_all_test_cases(suite):
    """Generate all test cases from a given test suite.
    :return: (test module name, test class name)"""
    if issubclass(type(suite), unittest.TestSuite):
        for test in suite:
            for res in get_all_test_cases(test):
                yield res
    else:
        yield (suite.__module__, suite.__class__.__name__)


def list_unittests():
    """Use the unittest module to retrieve all test cases from the
    support/testing directory."""
    loader = unittest.TestLoader()
    suite = loader.discover(os.path.join(brpath, "support", "testing"))
    tests = {}
    for module, test in get_all_test_cases(suite):
        module_path = os.path.join("support", "testing", *module.split('.'))
        tests.setdefault(module_path, []).append('%s.%s' % (module, test))
    return tests

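# Illustrative shape of the dictionary returned by list_unittests() (module
# and class names are hypothetical): keys are test file paths relative to the
# top-level directory, without extension, and values list the test cases they
# define, e.g.
#   {"support/testing/tests/package/test_foo": ["tests.package.test_foo.TestFoo"]}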

unittests = {}


#
# DEVELOPERS file parsing functions
#

class Developer:
    def __init__(self, name, files):
        self.name = name
        self.files = files
        self.packages = parse_developer_packages(files)
        self.architectures = parse_developer_architectures(files)
        self.infras = parse_developer_infras(files)
        self.runtime_tests = parse_developer_runtime_tests(files)
        self.defconfigs = parse_developer_defconfigs(files)

    def hasfile(self, f):
        for fs in self.files:
            if f.startswith(fs):
                return True
        return False

    def __repr__(self):
        name = '\'' + self.name.split(' <')[0][:20] + '\''
        things = []
        if len(self.files):
            things.append('{} files'.format(len(self.files)))
        if len(self.packages):
            things.append('{} pkgs'.format(len(self.packages)))
        if len(self.architectures):
            things.append('{} archs'.format(len(self.architectures)))
        if len(self.infras):
            things.append('{} infras'.format(len(self.infras)))
        if len(self.runtime_tests):
            things.append('{} tests'.format(len(self.runtime_tests)))
        if len(self.defconfigs):
            things.append('{} defconfigs'.format(len(self.defconfigs)))
        if things:
            return 'Developer <{} ({})>'.format(name, ', '.join(things))
        else:
            return 'Developer <' + name + '>'

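# Illustrative repr output (name and counts are hypothetical):
#   Developer <'Jane Doe' (3 files, 2 pkgs, 1 defconfigs)>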

def parse_developer_packages(fnames):
    """Given a list of file patterns, walk through the Buildroot source
    tree to find which packages are implemented by those file
    patterns, and return the set of those packages."""
    packages = set()
    for fname in fnames:
        for root, dirs, files in os.walk(os.path.join(brpath, fname)):
            for f in files:
                path = os.path.join(root, f)
                if fname_get_package_infra(path):
                    pkg = os.path.splitext(f)[0]
                    packages.add(pkg)
    return packages

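# Illustrative example (the package name is hypothetical): an entry covering
# "package/foo/" makes os.walk() visit "package/foo/foo.mk"; since that file
# uses a package infrastructure, "foo" is added to the returned set.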

def parse_arches_from_config_in(fname):
    """Given a path to an arch/Config.in.* file, parse it to get the set
    of BR2_ARCH values for this architecture."""
    arches = set()
    with open(fname, "r") as f:
        parsing_arches = False
        for line in f:
            line = line.strip()
            if line == "config BR2_ARCH":
                parsing_arches = True
                continue
            if parsing_arches:
                m = re.match(r"^\s*default \"([^\"]*)\".*", line)
                if m:
                    arches.add(m.group(1))
                else:
                    parsing_arches = False
    return arches

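# Illustrative Config.in fragment that this parser understands (the symbols
# are made up):
#   config BR2_ARCH
#           default "armeb" if BR2_armeb
#           default "arm"   if BR2_arm
# parse_arches_from_config_in() would return {"armeb", "arm"} for it.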

def parse_developer_architectures(fnames):
    """Given a list of file names, find the arch/Config.in.* ones, and use
    them to determine the architectures a developer is working on."""
    arches = set()
    for fname in fnames:
        # File names are relative to the top-level directory, so allow an
        # optional leading path component before arch/
        if not re.match(r"^(.*/)?arch/Config\.in\..*$", fname):
            continue
        arches = arches | parse_arches_from_config_in(os.path.join(brpath, fname))
    return arches


def parse_developer_infras(fnames):
    """Given a list of file names, return the package infrastructure names
    for the package/pkg-*.mk files among them."""
    infras = set()
    for fname in fnames:
        m = re.match(r"^package/pkg-([^.]*)\.mk$", fname)
        if m:
            infras.add(m.group(1))
    return infras

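# Example: an entry covering "package/pkg-cmake.mk" yields the infrastructure
# name "cmake".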

def parse_developer_defconfigs(fnames):
    """Given a list of file names, return the configuration names of the
    defconfigs among them, i.e. the base name with the '_defconfig'
    suffix stripped."""
    return {os.path.basename(fname[:-10])
            for fname in fnames
            if fname.endswith('_defconfig')}

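# Example: "configs/qemu_x86_64_defconfig" yields "qemu_x86_64"; the file name
# is only illustrative, any '*_defconfig' file is handled the same way.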

def parse_developer_runtime_tests(fnames):
    """Given a list of file names, return the runtime test cases
    corresponding to those files."""
    all_files = []
    # List all files recursively, keeping paths relative to the top-level
    # directory so that they can be matched against the 'unittests' keys
    for fname in fnames:
        if os.path.isdir(os.path.join(brpath, fname)):
            for root, _dirs, files in os.walk(os.path.join(brpath, fname)):
                all_files += [os.path.relpath(os.path.join(root, f), brpath)
                              for f in files]
        else:
            all_files.append(fname)

    # Get all runtime tests
    runtimes = set()
    for f in all_files:
        name = os.path.splitext(f)[0]
        if name in unittests:
            runtimes |= set(unittests[name])
    return runtimes


def parse_developers():
    """Parse the DEVELOPERS file and return a list of Developer objects."""
    developers = []
    global unittests
    unittests = list_unittests()
    developers_fname = os.path.join(brpath, 'DEVELOPERS')
    with open(developers_fname, mode='r', encoding='utf_8') as f:
        files = []
        name = None
        for linen, line in enumerate(f, start=1):
            line = line.strip()
            if line.startswith("#"):
                continue
            elif line.startswith("N:"):
                if name is not None or len(files) != 0:
                    print("Syntax error in DEVELOPERS file, line %d" % linen,
                          file=sys.stderr)
                name = line[2:].strip()
            elif line.startswith("F:"):
                fname = line[2:].strip()
                dev_files = glob.glob(os.path.join(brpath, fname))
                if len(dev_files) == 0:
                    print("WARNING: '%s' doesn't match any file" % fname,
                          file=sys.stderr)
                for dev_file in dev_files:
                    dev_file = os.path.relpath(dev_file, brpath)
                    dev_file = dev_file.replace(os.sep, '/')  # force unix sep
                    files.append(dev_file)
            elif line == "":
                if not name:
                    continue
                developers.append(Developer(name, files))
                files = []
                name = None
            else:
                print("Syntax error in DEVELOPERS file, line %d: '%s'" % (linen, line),
                      file=sys.stderr)
                return None
    # handle the last developer entry, which may not be followed by a blank line
    if name is not None:
        developers.append(Developer(name, files))
    return developers

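# For reference, the DEVELOPERS file parsed above is a list of entries
# separated by blank lines, where '#' introduces comments. A hypothetical
# entry looks like:
#
#   N: Jane Doe <jane@example.com>
#   F: package/foo/
#   F: configs/foo_defconfig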

def check_developers(developers, basepath=None):
    """Look at the list of files versioned in Buildroot, and return the
    list of files that are not handled by any developer."""
    if basepath is None:
        basepath = os.getcwd()
    cmd = ["git", "--git-dir", os.path.join(basepath, ".git"), "ls-files"]
    files = subprocess.check_output(cmd).decode(sys.stdout.encoding).strip().split("\n")
    unhandled_files = []
    for f in files:
        handled = False
        for d in developers:
            if d.hasfile(f):
                handled = True
                break
        if not handled:
            unhandled_files.append(f)
    return unhandled_files
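

# Minimal usage sketch (illustrative, not part of the original module): it
# assumes the Buildroot tree containing this file is a git checkout.
if __name__ == "__main__":
    devs = parse_developers()
    if devs is None:
        sys.exit(1)
    for dev in devs:
        print(repr(dev))
    for unhandled in check_developers(devs, basepath=brpath):
        print("No developer for %s" % unhandled)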