aboutsummaryrefslogtreecommitdiff
path: root/core/api_assembly.py
blob: 0844d4b6d34227afaab45e7be89df33018f63b13 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
#!/usr/bin/python3
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import collections
import json
import os
import sys

from cc.api_assembly import CcApiAssemblyContext
from java.api_assembly import JavaApiAssemblyContext
from build_file_generator import BuildFileGenerator
import ninja_tools

# One parsed API-contribution JSON payload, paired with the inner tree
# that produced it.
ContributionData = collections.namedtuple(
    "ContributionData", ["inner_tree", "json_data"])


def assemble_apis(context, inner_trees):
    """Assemble API surface stub libraries from all inner trees.

    Scans each inner tree's out dir for API contribution JSON files, groups
    the contributions into per-(language, surface, version, library) stub
    libraries, then writes the ninja rules to assemble them plus the
    Android.bp/BUILD files that make them visible to the inner trees.

    Args:
        context: build context providing out-dir paths and error reporting.
        inner_trees: the collection of inner trees (provides for_each_tree()
            and get()).
    """
    # Find all of the contributions from the inner tree
    contribution_files_dict = inner_trees.for_each_tree(
        api_contribution_files_for_inner_tree)

    # Load and validate the contribution files
    # TODO: Check timestamps and skip unnecessary work
    contributions = []
    for tree_key, filenames in contribution_files_dict.items():
        for filename in filenames:
            json_data = load_contribution_file(context, filename)
            if not json_data:
                continue
            # TODO: Validate the configs, especially that the domains match what we asked for
            # from the lunch config.
            contributions.append(
                ContributionData(inner_trees.get(tree_key), json_data))

    # Group contributions by language and API surface
    stub_libraries = collate_contributions(contributions)

    # Initialize the build file writer
    build_file_generator = BuildFileGenerator()

    # Initialize the ninja file writer
    with open(context.out.api_ninja_file(), "w",
              encoding='iso-8859-1') as ninja_file:
        ninja = ninja_tools.Ninja(context, ninja_file)

        # Iterate through all of the stub libraries and generate rules to assemble them
        # and Android.bp/BUILD files to make those available to inner trees.
        # TODO: Parallelize? Skip unnecessary work?
        for stub_library in stub_libraries:
            # TODO (b/265962882): Export APIs of version < current.
            # API files of older versions (29,30,...) are currently not
            # available in out/api_surfaces.
            # This causes issues during CC API import, since Soong
            # cannot resolve the dependency for rdeps that specify
            # `sdk_version:<num>`.
            # Create a short-term hack that unconditionally generates Soong
            # modules for all NDK libraries, starting from version=1.
            # This does not compromise on API correctness though, since the correct
            # version number will be passed to the ndkstubgen invocation.
            # TODO(b/266830850): Revisit stubs versioning for Module-lib API
            # surface.
            if stub_library.language == "cc_libraries" and stub_library.api_surface in ["publicapi", "module-libapi"]:
                versions = list(range(1,34)) # 34 is current
                versions.append("current")
                # NOTE: the handler is invoked once per version; the version
                # is communicated by mutating stub_library in place.
                for version in versions:
                    stub_library.api_surface_version = str(version)
                    STUB_LANGUAGE_HANDLERS[stub_library.language](
                        context, ninja, build_file_generator, stub_library)
            else:
                STUB_LANGUAGE_HANDLERS[stub_library.language](context, ninja,
                                                          build_file_generator,
                                                          stub_library)

        # TODO: Handle host_executables separately or as a StubLibrary language?

        # Finish writing the ninja file
        ninja.write()

    build_file_generator.write()
    build_file_generator.clean(
        context.out.api_surfaces_dir())  # delete stale Android.bp files


def api_contribution_files_for_inner_tree(tree_key, inner_tree, cookie):
    """Scan an inner_tree's out dir for the api contribution (.json) files.

    Args:
        tree_key: key identifying the tree (unused; part of the
            for_each_tree callback signature).
        inner_tree: tree whose out.api_contributions_dir() is scanned.
        cookie: opaque per-scan callback data (unused).

    Returns:
        List of paths to the .json contribution files found (non-recursive).
    """
    directory = inner_tree.out.api_contributions_dir()
    result = []
    with os.scandir(directory) as it:
        for dirent in it:
            # Skip non-file entries (e.g. subdirectories). The previous code
            # used `break` here, which silently dropped every remaining entry
            # as soon as one non-file was encountered — and scandir order is
            # arbitrary, so results were also nondeterministic.
            if not dirent.is_file():
                continue
            if dirent.name.endswith(".json"):
                result.append(os.path.join(directory, dirent.name))
    return result


def load_contribution_file(context, filename):
    """Load and return the API contribution at filename.

    On a JSON parse error, report it through the context error reporter and
    return None so the caller can skip this contribution and keep going.
    (The previous code re-raised after reporting, which made the caller's
    `if not json_data: continue` recovery path unreachable.)
    """
    # NOTE(review): JSON is specified as UTF-8; iso-8859-1 is kept to match
    # the rest of this file, and decodes any byte sequence without error —
    # non-ASCII UTF-8 content would be silently mis-decoded. Confirm intent.
    with open(filename, encoding='iso-8859-1') as f:
        try:
            return json.load(f)
        except json.decoder.JSONDecodeError as ex:
            # TODO: Error reporting
            context.errors.error(ex.msg, filename, ex.lineno, ex.colno)
            return None


class StubLibraryContribution(object):
    """One library's contribution to a stub library, tagged with its origin.

    Carries the per-library dict parsed from a contribution JSON file,
    together with the inner tree and API domain it came from.
    """

    def __init__(self, inner_tree, api_domain, library_contribution):
        # Plain value holder; the three attributes are read directly by the
        # per-language assemblers.
        self.library_contribution = library_contribution
        self.api_domain = api_domain
        self.inner_tree = inner_tree


class StubLibrary(object):
    """A stub library to be assembled for one API surface.

    One instance per (language, api_surface, api_surface_version, name)
    group; it accumulates every StubLibraryContribution that targets the
    same stub library across all inner trees.
    """

    def __init__(self, language, api_surface, api_surface_version, name):
        self.contributions = []
        self.name = name
        self.api_surface_version = api_surface_version
        self.api_surface = api_surface
        self.language = language

    def add_contribution(self, contrib):
        """Record one inner tree's contribution to this stub library."""
        self.contributions.append(contrib)


def collate_contributions(contributions):
    """Group parsed API contribution files into StubLibrary objects.

    Targets are grouped by (language, API surface name, surface version,
    library name); each distinct group becomes one StubLibrary that carries
    every matching contribution.
    """
    grouped = {}
    for contribution in contributions:
        data = contribution.json_data
        surface = data["name"]
        version = data["version"]
        for language in STUB_LANGUAGE_HANDLERS:
            for library in data.get(language, []):
                key = (language, surface, version, library["name"])
                if key not in grouped:
                    grouped[key] = StubLibrary(language, surface, version,
                                               library["name"])
                grouped[key].add_contribution(
                    StubLibraryContribution(contribution.inner_tree,
                                            data["api_domain"], library))
    return list(grouped.values())


def assemble_resource_api_library(context, ninja, build_file, stub_library):
    """Placeholder assembler for resource API stub libraries.

    Currently only logs the library and its contributions; the actual
    assembly rules are not yet generated (see TODO).
    """
    print(f"assembling resource_api_library {stub_library.api_surface}"
          f"-{stub_library.api_surface_version} {stub_library.name} from:")
    for contribution in stub_library.contributions:
        print(f"  {contribution.api_domain}"
              f" {contribution.library_contribution['api']}")
    # TODO: Implement me


# Dispatch table: contribution-file language key -> assembler callable.
# Every assembler takes (context, ninja, build_file_generator, stub_library).
# The keys double as the JSON keys scanned by collate_contributions().
STUB_LANGUAGE_HANDLERS = {
    "cc_libraries": CcApiAssemblyContext().get_cc_api_assembler(),
    "java_libraries": JavaApiAssemblyContext().get_java_api_assembler(),
    "resource_libraries": assemble_resource_api_library,
}