path: root/cbuildbot/builders/generic_builders.py
# Copyright 2015 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Module containing the generic builders."""

from __future__ import print_function

import multiprocessing
import os
import sys
import tempfile
import traceback

from chromite.cbuildbot import constants
from chromite.cbuildbot import failures_lib
from chromite.cbuildbot import results_lib
from chromite.cbuildbot import trybot_patch_pool
from chromite.cbuildbot.stages import build_stages
from chromite.cbuildbot.stages import report_stages
from chromite.cbuildbot.stages import sync_stages
from chromite.lib import cidb
from chromite.lib import commandline
from chromite.lib import cros_build_lib
from chromite.lib import cros_logging as logging
from chromite.lib import git
from chromite.lib import parallel


class Builder(object):
  """Parent class for all builder types.

  This class functions as an abstract parent class for various build types.
  Its intended use is builder_instance.Run().

  Attributes:
    _run: The BuilderRun object for this run.
    archive_stages: Dict of BuildConfig keys to ArchiveStage values.
    patch_pool: TrybotPatchPool.
  """

  def __init__(self, builder_run):
    """Initializes instance variables. Must be called by all subclasses."""
    self._run = builder_run

    # TODO: all the fields below should not be part of the generic builder.
    # We need to restructure our SimpleBuilder and see about creating a new
    # base in there for holding them.
    if self._run.config.chromeos_official:
      os.environ['CHROMEOS_OFFICIAL'] = '1'

    self.archive_stages = {}
    self.patch_pool = trybot_patch_pool.TrybotPatchPool()
    self._build_image_lock = multiprocessing.Lock()

  def Initialize(self):
    """Runs through the initialization steps of an actual build."""
    if self._run.options.resume:
      results_lib.LoadCheckpoint(self._run.buildroot)

    self._RunStage(report_stages.BuildStartStage)

    self._RunStage(build_stages.CleanUpStage)

  def _GetStageInstance(self, stage, *args, **kwargs):
    """Helper function to get a stage instance given the args.

    Useful as almost all stages just take in builder_run.
    """
    # Normally the default BuilderRun (self._run) is used, but it can
    # be overridden with "builder_run" kwargs (e.g. for child configs).
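    # For example, with a hypothetical |child_run| for a child config:
    #   self._GetStageInstance(build_stages.CleanUpStage, builder_run=child_run)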
    builder_run = kwargs.pop('builder_run', self._run)
    return stage(builder_run, *args, **kwargs)

  def _SetReleaseTag(self):
    """Sets run.attrs.release_tag from the manifest manager used in sync.

    Must be run after the sync stage, as syncing is what makes a release tag
    available, and must be run before any usage of attrs.release_tag.

    TODO(mtennant): Find a bottleneck place in syncing that can set this
    directly.  Be careful, as there are several kinds of syncing stages, and
    sync stages have been known to abort with sys.exit calls.
    """
    manifest_manager = getattr(self._run.attrs, 'manifest_manager', None)
    if manifest_manager:
      self._run.attrs.release_tag = manifest_manager.current_version
    else:
      self._run.attrs.release_tag = None

    logging.debug('Saved release_tag value for run: %r',
                  self._run.attrs.release_tag)

  def _RunStage(self, stage, *args, **kwargs):
    """Wrapper to run a stage.

    Args:
      stage: A BuilderStage class.
      args: args to pass to stage constructor.
      kwargs: kwargs to pass to stage constructor.

    Returns:
      Whatever the stage's Run method returns.
    """
    stage_instance = self._GetStageInstance(stage, *args, **kwargs)
    return stage_instance.Run()

  @staticmethod
  def _RunParallelStages(stage_objs):
    """Run the specified stages in parallel.

    Args:
      stage_objs: BuilderStage objects.
    """
    steps = [stage.Run for stage in stage_objs]
    try:
      parallel.RunParallelSteps(steps)

    except BaseException as ex:
      # If a stage threw an exception, it might not have correctly reported
      # results (e.g. because it was killed before it could report them).
      # In this case, attribute the exception to any stages that didn't
      # report back correctly (if any).
      for stage in stage_objs:
        for name in stage.GetStageNames():
          if not results_lib.Results.StageHasResults(name):
            results_lib.Results.Record(name, ex, str(ex))

      if cidb.CIDBConnectionFactory.IsCIDBSetup():
        db = cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder()
        if db:
          for stage in stage_objs:
            for build_stage_id in stage.GetBuildStageIDs():
              if not db.HasBuildStageFailed(build_stage_id):
                failures_lib.ReportStageFailureToCIDB(db,
                                                      build_stage_id,
                                                      ex)

      raise

  def _RunSyncStage(self, sync_instance):
    """Run given |sync_instance| stage and be sure attrs.release_tag set."""
    try:
      sync_instance.Run()
    finally:
      self._SetReleaseTag()

  def SetVersionInfo(self):
    """Sync the builder's version info with the buildbot runtime."""
    self._run.attrs.version_info = self.GetVersionInfo()

  def GetVersionInfo(self):
    """Returns a manifest_version.VersionInfo object for this build.

    Subclasses must override this method.
    """
    raise NotImplementedError()

  def GetSyncInstance(self):
    """Returns an instance of a SyncStage that should be run.

    Subclasses must override this method.
    """
    raise NotImplementedError()

  def GetCompletionInstance(self):
    """Returns the MasterSlaveSyncCompletionStage for this build.

    Subclasses may override this method.

    Returns:
      None
    """
    return None

  def RunStages(self):
    """Subclasses must override this method.  Runs the appropriate code."""
    raise NotImplementedError()

  def _ReExecuteInBuildroot(self, sync_instance):
    """Reexecutes self in buildroot and returns True if build succeeds.

    This allows the buildbot code to test itself when changes are patched for
    buildbot-related code.  This is a no-op if the target buildroot is the
    same as the buildroot of the running chromite checkout.

    Args:
      sync_instance: Instance of the sync stage that was run to sync.

    Returns:
      True if the build succeeded.
    """
    if not self._run.options.resume:
      results_lib.WriteCheckpoint(self._run.options.buildroot)

    args = sync_stages.BootstrapStage.FilterArgsForTargetCbuildbot(
        self._run.options.buildroot, constants.PATH_TO_CBUILDBOT,
        self._run.options)

    # Specify a buildroot explicitly (just in case, for local trybot).
    # Suppress any timeout options given from the commandline in the
    # invoked cbuildbot; our timeout will enforce it instead.
    args += ['--resume', '--timeout', '0', '--notee', '--nocgroups',
             '--buildroot', os.path.abspath(self._run.options.buildroot)]

    # Set --version. Note that --version isn't legal without --buildbot.
    if (self._run.options.buildbot and
        hasattr(self._run.attrs, 'manifest_manager')):
      ver = self._run.attrs.manifest_manager.current_version
      args += ['--version', ver]

    pool = getattr(sync_instance, 'pool', None)
    if pool:
      filename = os.path.join(self._run.options.buildroot,
                              'validation_pool.dump')
      pool.Save(filename)
      args += ['--validation_pool', filename]

    # Reset the cache dir so that the child will calculate it automatically.
    if not self._run.options.cache_dir_specified:
      commandline.BaseParser.ConfigureCacheDir(None)

    with tempfile.NamedTemporaryFile(prefix='metadata') as metadata_file:
      metadata_file.write(self._run.attrs.metadata.GetJSON())
      metadata_file.flush()
      args += ['--metadata_dump', metadata_file.name]

      # Re-run the command in the buildroot.
      # Finally, be generous and give the invoked cbuildbot 30s to shut down
      # once it is signaled.  It should exit sooner, but the SIGTERM may hit
      # while the system is particularly busy.
      return_obj = cros_build_lib.RunCommand(
          args, cwd=self._run.options.buildroot, error_code_ok=True,
          kill_timeout=30)
      return return_obj.returncode == 0

  def _InitializeTrybotPatchPool(self):
    """Generate patch pool from patches specified on the command line.

    Do this only if we need to patch changes later on.
    """
    changes_stage = sync_stages.PatchChangesStage.StageNamePrefix()
    check_func = results_lib.Results.PreviouslyCompletedRecord
    if not check_func(changes_stage) or self._run.options.bootstrap:
      options = self._run.options
      self.patch_pool = trybot_patch_pool.TrybotPatchPool.FromOptions(
          gerrit_patches=options.gerrit_patches,
          local_patches=options.local_patches,
          sourceroot=options.sourceroot,
          remote_patches=options.remote_patches)

  def _GetBootstrapStage(self):
    """Constructs and returns the BootStrapStage object.

    We return None when there are no chromite patches to test, --test-bootstrap
    wasn't passed in, and the chromite tracking branch matches the requested
    branch.
    """
    stage = None

    patches_needed = sync_stages.BootstrapStage.BootstrapPatchesNeeded(
        self._run, self.patch_pool)

    chromite_branch = git.GetChromiteTrackingBranch()

    if (patches_needed or
        self._run.options.test_bootstrap or
        chromite_branch != self._run.options.branch):
      stage = sync_stages.BootstrapStage(self._run, self.patch_pool)
    return stage

  def Run(self):
    """Main runner for this builder class.  Runs build and prints summary.

    Returns:
      Whether the build succeeded.
    """
    self._InitializeTrybotPatchPool()

    if self._run.options.bootstrap:
      bootstrap_stage = self._GetBootstrapStage()
      if bootstrap_stage:
        # BootstrapStage blocks on re-execution of cbuildbot.
        bootstrap_stage.Run()
        return bootstrap_stage.returncode == 0

    print_report = True
    exception_thrown = False
    success = True
    sync_instance = None
    try:
      self.Initialize()
      sync_instance = self.GetSyncInstance()
      self._RunSyncStage(sync_instance)

      if self._run.ShouldPatchAfterSync():
        # Filter out patches to the manifest, since PatchChangesStage can't
        # handle them.  Manifest patches are applied in the BootstrapStage.
        non_manifest_patches = self.patch_pool.FilterManifest(negate=True)
        if non_manifest_patches:
          self._RunStage(sync_stages.PatchChangesStage, non_manifest_patches)

      # Now that we have a fully synced & patched tree, we can let the builder
      # extract version information from the sources for this particular build.
      self.SetVersionInfo()
      if self._run.ShouldReexecAfterSync():
        print_report = False
        success = self._ReExecuteInBuildroot(sync_instance)
      else:
        self._RunStage(report_stages.BuildReexecutionFinishedStage)
        self.RunStages()

    except Exception as ex:
      exception_thrown = True
      if results_lib.Results.BuildSucceededSoFar():
        # If the build is marked as successful, but threw exceptions, that's a
        # problem. Print the traceback for debugging.
        if isinstance(ex, failures_lib.CompoundFailure):
          print(str(ex))

        traceback.print_exc(file=sys.stdout)
        raise

      if not (print_report and isinstance(ex, failures_lib.StepFailure)):
        # Unless we are going to print a report and the failure was an
        # expected StepFailure, re-raise the exception.
        raise

    finally:
      if print_report:
        results_lib.WriteCheckpoint(self._run.options.buildroot)
        completion_instance = self.GetCompletionInstance()
        self._RunStage(report_stages.ReportStage, completion_instance)
        success = results_lib.Results.BuildSucceededSoFar()
        if exception_thrown and success:
          success = False
          logging.PrintBuildbotStepWarnings()
          print("""\
Exception thrown, but all stages marked successful. This is an internal error,
because the stage that threw the exception should be marked as failing.""")

    return success