#!/usr/bin/env python3
#
#  Copyright (C) 2016-2018 Codethink Limited
#
#  This program is free software; you can redistribute it and/or
#  modify it under the terms of the GNU Lesser General Public
#  License as published by the Free Software Foundation; either
#  version 2 of the License, or (at your option) any later version.
#
#  This library is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.	 See the GNU
#  Lesser General Public License for more details.
#
#  You should have received a copy of the GNU Lesser General Public
#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
#  Authors:
#        Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>

import os
import datetime
from collections import deque
from collections.abc import Mapping
from contextlib import contextmanager
from . import _cachekey
from . import _signals
from . import _site
from . import _yaml
from ._exceptions import LoadError, LoadErrorReason, BstError
from ._message import Message, MessageType
from ._profile import Topics, profile_start, profile_end
from ._artifactcache import ArtifactCache
from ._workspaces import Workspaces


# Context()
#
# The Context object holds all of the user preferences
# and context for a given invocation of BuildStream.
#
# This is a collection of data from configuration files and command
# line arguments and consists of information such as where to store
# logs and artifacts, where to perform builds and cache downloaded sources,
# verbosity levels and basically anything pertaining to the context
# in which BuildStream was invoked.
#
class Context():

    def __init__(self):

        # Filename indicating which configuration file was used, or None for the defaults
        self.config_origin = None

        # The directory where various sources are stored
        self.sourcedir = None

        # The directory where build sandboxes will be created
        self.builddir = None

        # The local binary artifact cache directory
        self.artifactdir = None

        # The locations from which to push and pull prebuilt artifacts
        self.artifact_cache_specs = []

        # The directory to store build logs
        self.logdir = None

        # The abbreviated cache key length to display in the UI
        self.log_key_length = 0

        # Whether debug mode is enabled
        self.log_debug = False

        # Whether verbose mode is enabled
        self.log_verbose = False

        # Maximum number of lines to print from build logs
        self.log_error_lines = 0

        # Maximum number of lines to print in the master log for a detailed message
        self.log_message_lines = 0

        # Format string for printing the pipeline at startup time
        self.log_element_format = None

        # Format string for printing message lines in the master log
        self.log_message_format = None

        # Maximum number of fetch or refresh tasks
        self.sched_fetchers = 4

        # Maximum number of build tasks
        self.sched_builders = 4

        # Maximum number of push tasks
        self.sched_pushers = 4

        # Maximum number of retries for network tasks
        self.sched_network_retries = 2

        # What to do when a build fails in non-interactive mode
        self.sched_error_action = 'continue'

        # Whether elements must be rebuilt when their dependencies have changed
        self._strict_build_plan = None

        # Make sure the XDG vars are set in the environment before loading anything
        self._init_xdg()

        # Private variables
        self._cache_key = None
        self._message_handler = None
        self._message_depth = deque()
        self._projects = []
        self._project_overrides = {}
        self._workspaces = None

    # load()
    #
    # Loads the configuration files
    #
    # Args:
    #    config (filename): The user specified configuration file, if any
    #
    # Raises:
    #   LoadError
    #
    # This will first load the BuildStream default configuration and then
    # override that configuration with the configuration file indicated
    # by *config*, if any was specified.
    #
    def load(self, config=None):
        profile_start(Topics.LOAD_CONTEXT, 'load')

        # If a specific config file is not specified, default to trying
        # a $XDG_CONFIG_HOME/buildstream.conf file
        #
        if not config:
            default_config = os.path.join(os.environ['XDG_CONFIG_HOME'],
                                          'buildstream.conf')
            if os.path.exists(default_config):
                config = default_config

        # Load default config
        #
        defaults = _yaml.load(_site.default_user_config)

        if config:
            self.config_origin = os.path.abspath(config)
            user_config = _yaml.load(config)
            _yaml.composite(defaults, user_config)

        _yaml.node_validate(defaults, [
            'sourcedir', 'builddir', 'artifactdir', 'logdir',
            'scheduler', 'artifacts', 'logging', 'projects',
        ])

        for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir']:
            # Allow the ~ tilde expansion and any environment variables in
            # path specification in the config files.
            #
            path = _yaml.node_get(defaults, str, directory)
            path = os.path.expanduser(path)
            path = os.path.expandvars(path)
            path = os.path.normpath(path)
            setattr(self, directory, path)

        # Load artifact share configuration
        self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)

        # Load logging config
        logging = _yaml.node_get(defaults, Mapping, 'logging')
        _yaml.node_validate(logging, [
            'key-length', 'verbose',
            'error-lines', 'message-lines',
            'debug', 'element-format', 'message-format'
        ])
        self.log_key_length = _yaml.node_get(logging, int, 'key-length')
        self.log_debug = _yaml.node_get(logging, bool, 'debug')
        self.log_verbose = _yaml.node_get(logging, bool, 'verbose')
        self.log_error_lines = _yaml.node_get(logging, int, 'error-lines')
        self.log_message_lines = _yaml.node_get(logging, int, 'message-lines')
        self.log_element_format = _yaml.node_get(logging, str, 'element-format')
        self.log_message_format = _yaml.node_get(logging, str, 'message-format')

        # Load scheduler config
        scheduler = _yaml.node_get(defaults, Mapping, 'scheduler')
        _yaml.node_validate(scheduler, [
            'on-error', 'fetchers', 'builders',
            'pushers', 'network-retries'
        ])
        self.sched_error_action = _yaml.node_get(scheduler, str, 'on-error')
        self.sched_fetchers = _yaml.node_get(scheduler, int, 'fetchers')
        self.sched_builders = _yaml.node_get(scheduler, int, 'builders')
        self.sched_pushers = _yaml.node_get(scheduler, int, 'pushers')
        self.sched_network_retries = _yaml.node_get(scheduler, int, 'network-retries')

        # Load per-project overrides
        self._project_overrides = _yaml.node_get(defaults, Mapping, 'projects', default_value={})

        # Shallow validation of overrides; the parts of BuildStream which rely
        # on the overrides are expected to validate them elsewhere.
        for _, overrides in _yaml.node_items(self._project_overrides):
            _yaml.node_validate(overrides, ['artifacts', 'options', 'strict'])

        profile_end(Topics.LOAD_CONTEXT, 'load')

        valid_actions = ['continue', 'quit']
        if self.sched_error_action not in valid_actions:
            provenance = _yaml.node_get_provenance(scheduler, 'on-error')
            raise LoadError(LoadErrorReason.INVALID_DATA,
                            "{}: on-error should be one of: {}".format(
                                provenance, ", ".join(valid_actions)))

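    # Illustrative usage (editor's sketch, not part of the original source):
    # the frontend constructs a Context and loads configuration before any
    # projects are added. The 'custom.conf' filename below is hypothetical.
    #
    #     context = Context()
    #     context.load()                # or context.load('custom.conf')
    #     print(context.sourcedir, context.sched_builders)
    #
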
    # add_project():
    #
    # Add a project to the context.
    #
    # Args:
    #    project (Project): The project to add
    #
    def add_project(self, project):
        if not self._projects:
            self._workspaces = Workspaces(project)
        self._projects.append(project)

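    # Illustrative ordering (editor's sketch): the toplevel project is added
    # first, junctioned subprojects afterwards. The 'project' and 'subproject'
    # names below are hypothetical.
    #
    #     context.add_project(project)       # first project, also initializes Workspaces
    #     context.add_project(subproject)    # junctioned subprojects follow
    #     assert context.get_toplevel_project() is project
    #
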
    # get_projects():
    #
    # Return the list of projects in the context.
    #
    # Returns:
    #    (list): The list of projects
    #
    def get_projects(self):
        return self._projects

    # get_toplevel_project():
    #
    # Return the toplevel project, the one which BuildStream was
    # invoked with as opposed to a junctioned subproject.
    #
    # Returns:
    #    (Project): The toplevel project
    #
    def get_toplevel_project(self):
        return self._projects[0]

    # get_workspaces():
    #
    # Return a Workspaces object containing a list of workspaces.
    #
    # Returns:
    #    (Workspaces): The Workspaces object
    #
    def get_workspaces(self):
        return self._workspaces

    # get_overrides():
    #
    # Fetch the override dictionary for the active project. This returns
    # a node loaded from YAML and as such, values loaded from the returned
    # node should be loaded using the _yaml.node_get() family of functions.
    #
    # Args:
    #    project_name (str): The project name
    #
    # Returns:
    #    (Mapping): The overrides dictionary for the specified project
    #
    def get_overrides(self, project_name):
        return _yaml.node_get(self._project_overrides, Mapping, project_name, default_value={})

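    # Illustrative usage (editor's sketch): values are read from the returned
    # node with the _yaml.node_get() family, as noted above. The project name
    # 'myproject' and the keys shown are examples only.
    #
    #     overrides = context.get_overrides('myproject')
    #     artifacts = _yaml.node_get(overrides, Mapping, 'artifacts', default_value={})
    #
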
    # get_strict():
    #
    # Fetch whether we are strict or not
    #
    # Returns:
    #    (bool): Whether or not to use a strict build plan
    #
    def get_strict(self):

        # If it was set by the CLI, it overrides any config
        if self._strict_build_plan is not None:
            return self._strict_build_plan

        toplevel = self.get_toplevel_project()
        overrides = self.get_overrides(toplevel.name)
        return _yaml.node_get(overrides, bool, 'strict', default_value=True)

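    # Illustrative precedence (editor's sketch): a value set by the command
    # line frontend takes priority over any 'strict' project override.
    #
    #     context._strict_build_plan = False     # e.g. set from a CLI option
    #     assert context.get_strict() is False   # project overrides are ignored
    #
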
    # get_cache_key():
    #
    # Returns the cache key, calculating it if necessary
    #
    # Returns:
    #    (str): A hex digest cache key for the Context
    #
    def get_cache_key(self):
        if self._cache_key is None:

            # Anything that alters the build goes into the unique key
            self._cache_key = _cachekey.generate_key({})

        return self._cache_key

    # set_message_handler()
    #
    # Sets the handler for any status messages propagated through
    # the context.
    #
    # The message handler should have the same signature as
    # the message() method
    #
    def set_message_handler(self, handler):
        self._message_handler = handler

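    # Illustrative handler (editor's sketch): the handler receives the Message
    # and the Context, matching how message() below invokes it. The print()
    # body is only an example.
    #
    #     def on_message(message, context):
    #         print(message)
    #
    #     context.set_message_handler(on_message)
    #
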
    # silent_messages():
    #
    # Returns:
    #    (bool): Whether messages are currently being silenced
    #
    def silent_messages(self):
        for silent in self._message_depth:
            if silent:
                return True
        return False

    # message():
    #
    # Proxies a message back to the caller; this is the central
    # point through which all messages pass.
    #
    # Args:
    #    message: A Message object
    #
    def message(self, message):

        # Tag message only once
        if message.depth is None:
            message.depth = len(list(self._message_depth))

        # Send it off to the log handler (can be the frontend,
        # or it can be the child task which will log and propagate
        # to the frontend)
        assert self._message_handler

        self._message_handler(message, context=self)
        return

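    # Illustrative usage (editor's sketch): any holder of the Context can emit
    # a status message this way; unique_id may be None when the message is not
    # tied to a particular plugin.
    #
    #     context.message(Message(None, MessageType.INFO, "Something happened"))
    #
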
    # silence()
    #
    # A context manager to silence messages; this behaves in
    # the same way as the `silent_nested` argument of the
    # Context.timed_activity() context manager: especially
    # important messages will not be silenced.
    #
    @contextmanager
    def silence(self):
        self._push_message_depth(True)
        try:
            yield
        finally:
            self._pop_message_depth()

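    # Illustrative usage (editor's sketch): suppress status messages emitted
    # by a nested operation; the do_noisy_work() call is hypothetical.
    #
    #     with context.silence():
    #         do_noisy_work()
    #
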
    # timed_activity()
    #
    # Context manager for performing timed activities and logging those
    #
    # Args:
    #    activity_name (str): The name of the activity
    #    unique_id (int): An optional unique id of the plugin related to the message
    #    detail (str): An optional detailed message, can be multiline output
    #    silent_nested (bool): If specified, nested messages will be silenced
    #
    @contextmanager
    def timed_activity(self, activity_name, *, unique_id=None, detail=None, silent_nested=False):

        starttime = datetime.datetime.now()
        stopped_time = None

        def stop_time():
            nonlocal stopped_time
            stopped_time = datetime.datetime.now()

        def resume_time():
            nonlocal stopped_time
            nonlocal starttime
            sleep_time = datetime.datetime.now() - stopped_time
            starttime += sleep_time

        with _signals.suspendable(stop_time, resume_time):
            try:
                # Push activity depth for status messages
                message = Message(unique_id, MessageType.START, activity_name, detail=detail)
                self.message(message)
                self._push_message_depth(silent_nested)
                yield

            except BstError:
                # Note the failure in status messages and reraise; the scheduler
                # expects an error when there is an error.
                elapsed = datetime.datetime.now() - starttime
                message = Message(unique_id, MessageType.FAIL, activity_name, elapsed=elapsed)
                self._pop_message_depth()
                self.message(message)
                raise

            elapsed = datetime.datetime.now() - starttime
            message = Message(unique_id, MessageType.SUCCESS, activity_name, elapsed=elapsed)
            self._pop_message_depth()
            self.message(message)

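    # Illustrative usage (editor's sketch): wrap a long running operation so
    # that START and SUCCESS/FAIL messages are emitted around it; the
    # fetch_sources() call is hypothetical.
    #
    #     with context.timed_activity("Fetching sources", silent_nested=True):
    #         fetch_sources()
    #
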
    # _push_message_depth() / _pop_message_depth()
    #
    # For status messages, send the depth of timed
    # activities inside a given task through the message
    #
    def _push_message_depth(self, silent_nested):
        self._message_depth.appendleft(silent_nested)

    def _pop_message_depth(self):
        assert self._message_depth
        self._message_depth.popleft()

    # Force the resolved XDG variables into the environment;
    # this is so that they can be used directly to specify
    # preferred locations of things from user configuration
    # files.
    def _init_xdg(self):
        if not os.environ.get('XDG_CACHE_HOME'):
            os.environ['XDG_CACHE_HOME'] = os.path.expanduser('~/.cache')
        if not os.environ.get('XDG_CONFIG_HOME'):
            os.environ['XDG_CONFIG_HOME'] = os.path.expanduser('~/.config')
        if not os.environ.get('XDG_DATA_HOME'):
            os.environ['XDG_DATA_HOME'] = os.path.expanduser('~/.local/share')