Compare revisions

Changes are shown as if the source revision was being merged into the target revision.
Commits on Source (25): 417 additions and 75 deletions
@@ -4,11 +4,15 @@ include =
   */buildstream/*
 omit =
-  # Omit profiling helper module
+  # Omit some internals
   */buildstream/_profile.py
+  */buildstream/__main__.py
+  */buildstream/_version.py
   # Omit generated code
   */buildstream/_protos/*
   */.eggs/*
+  # Omit .tox directory
+  */.tox/*

 [report]
 show_missing = True
...
@@ -13,11 +13,12 @@ tests/**/*.pyc
 integration-cache/
 tmp
 .coverage
+.coverage-reports/
 .coverage.*
 .cache
 .pytest_cache/
 *.bst/
-.tox
+.tox/

 # Pycache, in case buildstream is ran directly from within the source
 # tree
...
@@ -13,6 +13,7 @@ variables:
   PYTEST_ADDOPTS: "--color=yes"
   INTEGRATION_CACHE: "${CI_PROJECT_DIR}/cache/integration-cache"
   TEST_COMMAND: "tox -- --color=yes --integration"
+  COVERAGE_PREFIX: "${CI_JOB_NAME}."

 #####################################################
@@ -24,9 +25,6 @@ variables:
 .tests-template: &tests
   stage: test

-  variables:
-    COVERAGE_DIR: coverage-linux
-
   before_script:

   # Diagnostics
   - mount
@@ -40,14 +38,11 @@ variables:
   - su buildstream -c "${TEST_COMMAND}"

   after_script:
-  # Collect our reports
-  - mkdir -p ${COVERAGE_DIR}
-  - cp .coverage ${COVERAGE_DIR}/coverage."${CI_JOB_NAME}"

   except:
   - schedules

   artifacts:
     paths:
-    - ${COVERAGE_DIR}
+    - .coverage-reports

 tests-debian-9:
   image: buildstream/testsuite-debian:9-5da27168-32c47d1c
@@ -83,7 +78,6 @@ tests-unix:
   <<: *tests
   variables:
     BST_FORCE_BACKEND: "unix"
-    COVERAGE_DIR: coverage-unix

   script:
@@ -239,22 +233,22 @@ coverage:
   stage: post
   coverage: '/TOTAL +\d+ +\d+ +(\d+\.\d+)%/'
   script:
-  - pip3 install -r requirements/requirements.txt -r requirements/dev-requirements.txt
-  - pip3 install --no-index .
-  - mkdir report
-  - cd report
-  - cp ../coverage-unix/coverage.* .
-  - cp ../coverage-linux/coverage.* .
-  - ls coverage.*
-  - coverage combine --rcfile=../.coveragerc -a coverage.*
-  - coverage report --rcfile=../.coveragerc -m
+  - cp -a .coverage-reports/ ./coverage-sources
+  - tox -e coverage
+  - cp -a .coverage-reports/ ./coverage-report
   dependencies:
   - tests-debian-9
   - tests-fedora-27
   - tests-fedora-28
+  - tests-fedora-missing-deps
+  - tests-ubuntu-18.04
   - tests-unix
   except:
   - schedules
+  artifacts:
+    paths:
+    - coverage-sources/
+    - coverage-report/

 # Deploy, only for merges which land on master branch.
 #
...
@@ -553,7 +553,7 @@ One problem which arises from this is that we end up having symbols
 which are *public* according to the :ref:`rules discussed in the previous section
 <contributing_public_and_private>`, but must be hidden away from the
 *"Public API Surface"*. For example, BuildStream internal classes need
-to invoke methods on the ``Element`` and ``Source`` classes, wheras these
+to invoke methods on the ``Element`` and ``Source`` classes, whereas these
 methods need to be hidden from the *"Public API Surface"*.

 This is where BuildStream deviates from the PEP-8 standard for public
@@ -631,7 +631,7 @@ An element plugin will derive from Element by importing::

     from buildstream import Element

-When importing utilities specifically, dont import function names
+When importing utilities specifically, don't import function names
 from there, instead import the module itself::

     from . import utils
@@ -737,7 +737,7 @@ Abstract methods
 ~~~~~~~~~~~~~~~~
 In BuildStream, an *"Abstract Method"* is a bit of a misnomer and does
 not match up to how Python defines abstract methods, we need to seek out
-a new nomanclature to refer to these methods.
+a new nomenclature to refer to these methods.

 In Python, an *"Abstract Method"* is a method which **must** be
 implemented by a subclass, whereas all methods in Python can be
@@ -960,7 +960,7 @@ possible, and avoid any cyclic relationships in modules.
 For instance, the ``Source`` objects are owned by ``Element``
 objects in the BuildStream data model, and as such the ``Element``
 will delegate some activities to the ``Source`` objects in its
-possesion. The ``Source`` objects should however never call functions
+possession. The ``Source`` objects should however never call functions
 on the ``Element`` object, nor should the ``Source`` object itself
 have any understanding of what an ``Element`` is.
@@ -1223,7 +1223,7 @@ For further information about using the reStructuredText with sphinx, please see
 Building Docs
 ~~~~~~~~~~~~~
 Before you can build the docs, you will end to ensure that you have installed
-the required :ref:`buid dependencies <contributing_build_deps>` as mentioned
+the required :ref:`build dependencies <contributing_build_deps>` as mentioned
 in the testing section above.

 To build the documentation, just run the following::
@@ -1365,7 +1365,7 @@ Structure of an example
 '''''''''''''''''''''''
 The :ref:`tutorial <tutorial>` and the :ref:`examples <examples>` sections
 of the documentation contain a series of sample projects, each chapter in
-the tutoral, or standalone example uses a sample project.
+the tutorial, or standalone example uses a sample project.

 Here is the the structure for adding new examples and tutorial chapters.
@@ -1471,8 +1471,8 @@ Installing build dependencies
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 Some of BuildStream's dependencies have non-python build dependencies. When
 running tests with ``tox``, you will first need to install these dependencies.
-Exact steps to install these will depend on your oprtation systemm. Commands
-for installing them for some common distributions are lised below.
+Exact steps to install these will depend on your operating system. Commands
+for installing them for some common distributions are listed below.

 For Fedora-based systems::
@@ -1498,6 +1498,13 @@ option when running tox::

     tox -e py37

+If you would like to test and lint at the same time, or if you do have multiple
+python versions installed and would like to test against multiple versions, then
+we recommend using `detox <https://github.com/tox-dev/detox>`_, just run it with
+the same arguments you would give `tox`::
+
+    detox -e lint,py36,py37
+
 Linting is performed separately from testing. In order to run the linting step which
 consists of running the ``pycodestyle`` and ``pylint`` tools, run the following::
@@ -1533,7 +1540,7 @@ the frontend tests you can do::

     tox -- tests/frontend/

-Specific tests can be chosen by using the :: delimeter after the test module.
+Specific tests can be chosen by using the :: delimiter after the test module.
 If you wanted to run the test_build_track test within frontend/buildtrack.py you could do::

     tox -- tests/frontend/buildtrack.py::test_build_track
@@ -1553,7 +1560,7 @@ can run ``tox`` with ``-r`` or ``--recreate`` option.
 .. note::

     By default, we do not allow use of site packages in our ``tox``
-    confguration to enable running the tests in an isolated environment.
+    configuration to enable running the tests in an isolated environment.
     If you need to enable use of site packages for whatever reason, you can
     do so by passing the ``--sitepackages`` option to ``tox``. Also, you will
     not need to install any of the build dependencies mentioned above if you
@@ -1574,10 +1581,23 @@ can run ``tox`` with ``-r`` or ``--recreate`` option.

     ./setup.py test --addopts 'tests/frontend/buildtrack.py::test_build_track'

+Observing coverage
+~~~~~~~~~~~~~~~~~~
+Once you have run the tests using `tox` (or `detox`), some coverage reports will
+have been left behind.
+
+To view the coverage report of the last test run, simply run::
+
+    tox -e coverage
+
+This will collate any reports from separate python environments that may be
+under test before displaying the combined coverage.
+
 Adding tests
 ~~~~~~~~~~~~
 Tests are found in the tests subdirectory, inside of which
-there is a separarate directory for each *domain* of tests.
+there is a separate directory for each *domain* of tests.

 All tests are collected as::

     tests/*/*.py
...
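For the curious, the collation that ``tox -e coverage`` performs can also be
driven from the coverage.py API. The sketch below is illustrative only: it
assumes coverage.py 4.x and the ``.coverage-reports/`` layout introduced above,
while the real tox environment simply runs the ``coverage combine`` and
``coverage report`` commands with the project's ``.coveragerc``:

    # Rough equivalent of the [testenv:coverage] commands in tox.ini,
    # using the coverage.py API instead of the CLI.
    from coverage import Coverage

    cov = Coverage(data_file='.coverage-reports/.coverage',
                   config_file='.coveragerc')

    # Merge the per-environment data files (.coverage.<prefix><envname>)
    # that the py35/py36/py37 runs moved into .coverage-reports/.
    cov.combine(['.coverage-reports/'])
    cov.save()

    # Print the combined report with missing line numbers, matching
    # the [report] show_missing setting in .coveragerc.
    cov.report(show_missing=True)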
@@ -17,6 +17,8 @@
 # Authors:
 #        Tristan Van Berkom <tristan.vanberkom@codethink.co.uk>

+from .. import _yaml
+from .._exceptions import LoadError, LoadErrorReason, PlatformError
 from .._platform import Platform
 from .optionenum import OptionEnum
@@ -41,7 +43,34 @@ class OptionArch(OptionEnum):
         super(OptionArch, self).load(node, allow_default_definition=False)

     def load_default_value(self, node):
-        return Platform.get_host_arch()
+        arch = Platform.get_host_arch()
+
+        default_value = None
+
+        for index, value in enumerate(self.values):
+            try:
+                canonical_value = Platform.canonicalize_arch(value)
+                if default_value is None and canonical_value == arch:
+                    default_value = value
+                    # Do not terminate the loop early to ensure we validate
+                    # all values in the list.
+            except PlatformError as e:
+                provenance = _yaml.node_get_provenance(node, key='values', indices=[index])
+                prefix = ""
+                if provenance:
+                    prefix = "{}: ".format(provenance)
+                raise LoadError(LoadErrorReason.INVALID_DATA,
+                                "{}Invalid value for {} option '{}': {}"
+                                .format(prefix, self.OPTION_TYPE, self.name, e))
+
+        if default_value is None:
+            # Host architecture is not supported by the project.
+            # Do not raise an error here as the user may override it.
+            # If the user does not override it, an error will be raised
+            # by resolve()/validate().
+            default_value = arch
+
+        return default_value

     def resolve(self):
...
@@ -22,6 +22,7 @@
 import os
 import itertools
 from operator import itemgetter
+from collections import OrderedDict

 from ._exceptions import PipelineError
 from ._message import Message, MessageType
@@ -135,9 +136,6 @@ class Pipeline():
             # Preflight
             element._preflight()

-            # Determine initial element state.
-            element._update_state()
-
     # dependencies()
     #
     # Generator function to iterate over elements and optionally
@@ -479,7 +477,7 @@ class Pipeline():
 #
 class _Planner():
     def __init__(self):
-        self.depth_map = {}
+        self.depth_map = OrderedDict()
         self.visiting_elements = set()

     # Here we want to traverse the same element more than once when
...
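A note on the OrderedDict change above: plain dicts only guarantee insertion
order from Python 3.7 onward, and the planner sorts its depth map with Python's
stable sort, so ordered storage is what makes the resulting plan deterministic
across the supported py35/py36/py37 environments. A minimal standalone
illustration of the idea (not BuildStream code):

    from collections import OrderedDict
    from operator import itemgetter

    # Each element is recorded with the depth at which it was planned;
    # OrderedDict preserves discovery order on every Python version.
    depth_map = OrderedDict()
    for name, depth in [("base.bst", 2), ("middleware.bst", 1),
                        ("a.bst", 1), ("app.bst", 0)]:
        depth_map[name] = depth

    # sorted() is stable, so elements at equal depth keep their
    # discovery order while deeper elements come first.
    plan = sorted(depth_map.items(), key=itemgetter(1), reverse=True)
    print([name for name, _ in plan])
    # ['base.bst', 'middleware.bst', 'a.bst', 'app.bst']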
@@ -77,20 +77,17 @@ class Platform():
     def get_host_os():
         return os.uname()[0]

-    # get_host_arch():
+    # canonicalize_arch():
     #
-    # This returns the architecture of the host machine. The possible values
-    # map from uname -m in order to be a OS independent list.
+    # This returns the canonical, OS-independent architecture name
+    # or raises a PlatformError if the architecture is unknown.
     #
-    # Returns:
-    #    (string): String representing the architecture
     @staticmethod
-    def get_host_arch():
-        # get the hardware identifier from uname
-        uname_machine = os.uname()[4]
-        uname_to_arch = {
+    def canonicalize_arch(arch):
+        aliases = {
+            "aarch32": "aarch32",
             "aarch64": "aarch64",
-            "aarch64_be": "aarch64-be",
+            "aarch64-be": "aarch64-be",
             "amd64": "x86-64",
             "arm": "aarch32",
             "armv8l": "aarch64",
@@ -99,17 +96,34 @@ class Platform():
             "i486": "x86-32",
             "i586": "x86-32",
             "i686": "x86-32",
+            "power-isa-be": "power-isa-be",
+            "power-isa-le": "power-isa-le",
             "ppc64": "power-isa-be",
             "ppc64le": "power-isa-le",
             "sparc": "sparc-v9",
             "sparc64": "sparc-v9",
-            "x86_64": "x86-64"
+            "sparc-v9": "sparc-v9",
+            "x86-32": "x86-32",
+            "x86-64": "x86-64"
         }
         try:
-            return uname_to_arch[uname_machine]
+            return aliases[arch.replace('_', '-')]
         except KeyError:
-            raise PlatformError("uname gave unsupported machine architecture: {}"
-                                .format(uname_machine))
+            raise PlatformError("Unknown architecture: {}".format(arch))
+
+    # get_host_arch():
+    #
+    # This returns the architecture of the host machine. The possible values
+    # map from uname -m in order to be a OS independent list.
+    #
+    # Returns:
+    #    (string): String representing the architecture
+    @staticmethod
+    def get_host_arch():
+        # get the hardware identifier from uname
+        uname_machine = os.uname()[4]
+        return Platform.canonicalize_arch(uname_machine)

 ##################################################################
 #                        Sandbox functions                       #
...
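With the table above acting as an alias map, every canonical name is also its
own alias, and underscores are folded to hyphens before the lookup, so uname
spellings and project-file spellings converge on one canonical form. An
illustrative standalone sketch of the lookup behaviour (abridged table):

    # Abridged alias table mirroring Platform.canonicalize_arch().
    aliases = {
        "amd64": "x86-64",
        "x86-64": "x86-64",
        "arm": "aarch32",
        "aarch32": "aarch32",
    }

    def canonicalize(arch):
        # Fold underscores to hyphens so "x86_64" and "x86-64" match.
        try:
            return aliases[arch.replace('_', '-')]
        except KeyError:
            raise ValueError("Unknown architecture: {}".format(arch))

    assert canonicalize("x86_64") == canonicalize("amd64") == "x86-64"
    assert canonicalize("arm") == canonicalize("aarch32") == "aarch32"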
@@ -71,9 +71,6 @@ class BuildQueue(Queue):
         return element._assemble()

     def status(self, element):
-        # state of dependencies may have changed, recalculate element state
-        element._update_state()
-
         if not element._is_required():
             # Artifact is not currently required but it may be requested later.
             # Keep it in the queue.
...
@@ -44,9 +44,6 @@ class FetchQueue(Queue):
         element._fetch()

     def status(self, element):
-        # state of dependencies may have changed, recalculate element state
-        element._update_state()
-
         if not element._is_required():
             # Artifact is not currently required but it may be requested later.
             # Keep it in the queue.
@@ -72,7 +69,7 @@ class FetchQueue(Queue):
         if status == JobStatus.FAIL:
             return

-        element._update_state()
+        element._fetch_done()

         # Successful fetch, we must be CACHED now
         assert element._get_consistency() == Consistency.CACHED
...
@@ -39,9 +39,6 @@ class PullQueue(Queue):
             raise SkipJob(self.action_name)

     def status(self, element):
-        # state of dependencies may have changed, recalculate element state
-        element._update_state()
-
         if not element._is_required():
             # Artifact is not currently required but it may be requested later.
             # Keep it in the queue.
...
@@ -1018,17 +1018,6 @@ class Stream():
         _, status = self._scheduler.run(self.queues)

-        # Force update element states after a run, such that the summary
-        # is more coherent
-        try:
-            for element in self.total_elements:
-                element._update_state()
-        except BstError as e:
-            self._message(MessageType.ERROR, "Error resolving final state", detail=str(e))
-            set_last_task_error(e.domain, e.reason)
-        except Exception as e:               # pylint: disable=broad-except
-            self._message(MessageType.BUG, "Unhandled exception while resolving final state", detail=str(e))
-
         if status == SchedStatus.ERROR:
             raise StreamError()
         elif status == SchedStatus.TERMINATED:
...
@@ -197,6 +197,7 @@ class Element(Plugin):
         self.__runtime_dependencies = []        # Direct runtime dependency Elements
         self.__build_dependencies = []          # Direct build dependency Elements
+        self.__reverse_build_dependencies = []  # Direct reverse build dependency Elements
         self.__sources = []                     # List of Sources
         self.__weak_cache_key = None            # Our cached weak cache key
         self.__strict_cache_key = None          # Our cached cache key for strict builds
@@ -227,6 +228,8 @@ class Element(Plugin):
         self.__metadata_workspaced = {}         # Boolean of whether it's workspaced
         self.__metadata_workspaced_dependencies = {}  # List of which dependencies are workspaced

+        self.__is_workspaced = None
+
         # Ensure we have loaded this class's defaults
         self.__init_defaults(plugin_conf)
@@ -370,6 +373,13 @@ class Element(Plugin):
     #############################################################
     #                      Public Methods                       #
     #############################################################
+    @property
+    def is_workspaced(self):
+        if self.__is_workspaced is None:
+            self.__is_workspaced = self._get_workspace() is not None
+
+        return self.__is_workspaced
+
     def sources(self):
         """A generator function to enumerate the element sources
@@ -439,6 +449,27 @@ class Element(Plugin):
         if should_yield and (recurse or recursed) and scope != Scope.BUILD:
             yield self

+    def reverse_build_dependencies(self, recurse=False):
+        if not recurse:
+            yield from self.__reverse_build_dependencies
+            return
+
+        # visited = set()
+
+        def recurse_rdeps(element):
+            # if element in visited:
+            #     return
+            # visited.add(element)
+
+            yield element
+            for rdep in element.__reverse_build_dependencies:
+                yield from recurse_rdeps(rdep)
+
+        for rdep in self.__reverse_build_dependencies:
+            yield from recurse_rdeps(rdep)
+
     def search(self, scope, name):
         """Search for a dependency by name
@@ -930,6 +961,7 @@ class Element(Plugin):
         for meta_dep in meta.build_dependencies:
             dependency = Element._new_from_meta(meta_dep)
             element.__build_dependencies.append(dependency)
+            dependency.__reverse_build_dependencies.append(element)

         return element
@@ -1279,6 +1311,9 @@ class Element(Plugin):
         for source in self.sources():
             source._preflight()

+        # Determine initial element state.
+        self._update_state()
+
     # _schedule_tracking():
     #
     # Force an element state to be inconsistent. Any sources appear to be
@@ -1306,6 +1341,9 @@ class Element(Plugin):

         self._update_state()

+        for rdep in self.reverse_build_dependencies(recurse=True):
+            rdep._update_state()
+
     # _track():
     #
     # Calls track() on the Element sources
@@ -1446,6 +1484,7 @@ class Element(Plugin):
     # This unblocks pull/fetch/build.
     #
     def _set_required(self):
+        # FIXME: this should enqueue stuff in the queue, it should not be here by default
         if self.__required:
             # Already done
             return
@@ -1456,6 +1495,7 @@ class Element(Plugin):
         for dep in self.dependencies(Scope.RUN, recurse=False):
             dep._set_required()

+        # FIXME: this should not be done at all here
         self._update_state()

     # _is_required():
@@ -1499,10 +1539,16 @@ class Element(Plugin):
         self.__assemble_scheduled = False
         self.__assemble_done = True

+        # FIXME: only if workspaced
         self._update_state()

-        if self._get_workspace() and self._cached_success():
+        if self.is_workspaced:
+            # Update the state of all reverse dependencies
+            for reverse_dependency in self.reverse_build_dependencies(recurse=True):
+                reverse_dependency._update_state()
+
+        if self.is_workspaced and self._cached_success():
             assert utils._is_main_process(), \
                 "Attempted to save workspace configuration from child process"

 #
@@ -2035,6 +2081,9 @@ class Element(Plugin):
             source._fetch(previous_sources)
             previous_sources.append(source)

+    def _fetch_done(self):
+        self._update_state()
+
     #############################################################
     #                   Private Local Methods                   #
     #############################################################
@@ -2441,11 +2490,17 @@ class Element(Plugin):
         # Sandbox config, unlike others, has fixed members so we should validate them
         _yaml.node_validate(sandbox_config, ['build-uid', 'build-gid', 'build-os', 'build-arch'])

+        build_arch = self.node_get_member(sandbox_config, str, 'build-arch', default=None)
+        if build_arch:
+            build_arch = Platform.canonicalize_arch(build_arch)
+        else:
+            build_arch = host_arch
+
         return SandboxConfig(
             self.node_get_member(sandbox_config, int, 'build-uid'),
             self.node_get_member(sandbox_config, int, 'build-gid'),
             self.node_get_member(sandbox_config, str, 'build-os', default=host_os),
-            self.node_get_member(sandbox_config, str, 'build-arch', default=host_arch))
+            build_arch)

     # This makes a special exception for the split rules, which
     # elements may extend but whos defaults are defined in the project.
...
New files, element.bst and project.conf for the option-arch-alias test project:

kind: autotools

variables:
  result: "Nothing"
  (?):
  - machine_arch == "arm":
      result: "Army"
  - machine_arch == "x86_64":
      result: "X86-64y"

name: test

options:
  machine_arch:
    type: arch
    description: The machine architecture
    values:
    - arm
    - x86_64

New files, element.bst and project.conf for the option-arch-unknown test project:

kind: autotools

variables:
  result: "Nothing"
  (?):
  - machine_arch == "aarch32":
      result: "Army"
  - machine_arch == "aarch64":
      result: "Aarchy"
  - machine_arch == "x86-128":
      result: "X86-128y"

name: test

options:
  machine_arch:
    type: arch
    description: The machine architecture
    values:
    - aarch32
    - aarch64
    - x86-128
...
@@ -75,3 +75,47 @@ def test_unsupported_arch(cli, datafiles):
         ])
         result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)

+
+@pytest.mark.datafiles(DATA_DIR)
+def test_alias(cli, datafiles):
+    with override_uname_arch("arm"):
+        project = os.path.join(datafiles.dirname, datafiles.basename, 'option-arch-alias')
+        result = cli.run(project=project, silent=True, args=[
+            'show',
+            '--deps', 'none',
+            '--format', '%{vars}',
+            'element.bst'
+        ])
+
+        result.assert_success()
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_unknown_host_arch(cli, datafiles):
+    with override_uname_arch("x86_128"):
+        project = os.path.join(datafiles.dirname, datafiles.basename, 'option-arch')
+        result = cli.run(project=project, silent=True, args=[
+            'show',
+            '--deps', 'none',
+            '--format', '%{vars}',
+            'element.bst'
+        ])
+
+        result.assert_main_error(ErrorDomain.PLATFORM, None)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_unknown_project_arch(cli, datafiles):
+    project = os.path.join(datafiles.dirname, datafiles.basename, 'option-arch-unknown')
+    result = cli.run(project=project, silent=True, args=[
+        'show',
+        '--deps', 'none',
+        '--format', '%{vars}',
+        'element.bst'
+    ])
+
+    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
...
New test file (complete contents):

import os
import pytest

from tests.testutils import cli, create_repo
from buildstream import _yaml

# Project directory
DATA_DIR = os.path.join(
    os.path.dirname(os.path.realpath(__file__)),
    "project",
)


def create_element(repo, name, path, dependencies, ref=None):
    element = {
        'kind': 'import',
        'sources': [
            repo.source_config(ref=ref)
        ],
        'depends': dependencies
    }
    _yaml.dump(element, os.path.join(path, name))


# This tests a variety of scenarios and checks that the order in
# which things are processed remains stable.
#
# This is especially important in order to ensure that our
# depth sorting and optimization of which elements should be
# processed first is doing it's job right, and that we are
# promoting elements to the build queue as soon as possible
#
# Parameters:
#    targets (target elements): The targets to invoke bst with
#    template (dict): The project template dictionary, for create_element()
#    expected (list): A list of element names in the expected order
#
@pytest.mark.datafiles(os.path.join(DATA_DIR))
@pytest.mark.parametrize("target,template,expected", [
    # First simple test
    ('3.bst', {
        '0.bst': ['1.bst'],
        '1.bst': [],
        '2.bst': ['0.bst'],
        '3.bst': ['0.bst', '1.bst', '2.bst']
    }, ['1.bst', '0.bst', '2.bst', '3.bst']),

    # A more complicated test with build of build dependencies
    ('target.bst', {
        'a.bst': [],
        'base.bst': [],
        'timezones.bst': [],
        'middleware.bst': [{'filename': 'base.bst', 'type': 'build'}],
        'app.bst': [{'filename': 'middleware.bst', 'type': 'build'}],
        'target.bst': ['a.bst', 'base.bst', 'middleware.bst', 'app.bst', 'timezones.bst']
    }, ['base.bst', 'middleware.bst', 'a.bst', 'app.bst', 'timezones.bst', 'target.bst']),
])
@pytest.mark.parametrize("operation", [('show'), ('fetch'), ('build')])
def test_order(cli, datafiles, tmpdir, operation, target, template, expected):
    project = os.path.join(datafiles.dirname, datafiles.basename)
    dev_files_path = os.path.join(project, 'files', 'dev-files')
    element_path = os.path.join(project, 'elements')

    # FIXME: Remove this when the test passes reliably.
    #
    #        There is no reason why the order should not
    #        be preserved when the builders is set to 1,
    #        the scheduler queue processing still seems to
    #        be losing the order.
    #
    if operation == 'build':
        pytest.skip("FIXME: This still only sometimes passes")

    # Configure to only allow one fetcher at a time, make it easy to
    # determine what is being planned in what order.
    cli.configure({
        'scheduler': {
            'fetchers': 1,
            'builders': 1
        }
    })

    # Build the project from the template, make import elements
    # all with the same repo
    #
    repo = create_repo('git', str(tmpdir))
    ref = repo.create(dev_files_path)
    for element, dependencies in template.items():
        create_element(repo, element, element_path, dependencies, ref=ref)
        repo.add_commit()

    # Run test and collect results
    if operation == 'show':
        result = cli.run(args=['show', '--deps', 'plan', '--format', '%{name}', target], project=project, silent=True)
        result.assert_success()
        results = result.output.splitlines()
    else:
        if operation == 'fetch':
            result = cli.run(args=['source', 'fetch', target], project=project, silent=True)
        else:
            result = cli.run(args=[operation, target], project=project, silent=True)
        result.assert_success()
        results = result.get_start_order(operation)

    # Assert the order
    print("Expected order: {}".format(expected))
    print("Observed result order: {}".format(results))
    assert results == expected
...
@@ -167,6 +167,23 @@ class Result():
     def assert_shell_error(self, fail_message=''):
         assert self.exit_code == 1, fail_message

+    # get_start_order()
+    #
+    # Gets the list of elements processed in a given queue, in the
+    # order of their first appearances in the session.
+    #
+    # Args:
+    #    activity (str): The queue activity name (like 'fetch')
+    #
+    # Returns:
+    #    (list): A list of element names in the order which they first appeared in the result
+    #
+    def get_start_order(self, activity):
+        results = re.findall(r'\[\s*{}:(\S+)\s*\]\s*START\s*.*\.log'.format(activity), self.stderr)
+        if results is None:
+            return []
+        return list(results)
+
     # get_tracked_elements()
     #
     # Produces a list of element names on which tracking occurred
...
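The START lines that ``get_start_order()`` scrapes from stderr have roughly the
shape shown below; the log text here is fabricated for illustration, not
verbatim bst output. Note also that ``re.findall()`` always returns a list, so
the ``is None`` guard in the new method is purely defensive:

    import re

    # Fabricated stderr fragment in the shape the regex expects.
    stderr = (
        "[ fetch:base.bst ] START   cache/logs/test/base/12345.log\n"
        "[ fetch:app.bst ] START   cache/logs/test/app/12346.log\n"
    )

    activity = 'fetch'
    results = re.findall(
        r'\[\s*{}:(\S+)\s*\]\s*START\s*.*\.log'.format(activity), stderr)
    print(results)  # ['base.bst', 'app.bst']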
+#
+# Tox global configuration
+#
 [tox]
 envlist = py35,py36,py37
 skip_missing_interpreters = true

+#
+# Defaults for all environments
+#
+# Anything specified here is iherited by the sections
+#
 [testenv]
-commands = pytest {posargs}
+commands =
+    pytest --basetemp {envtmpdir} {posargs}
+    mkdir -p .coverage-reports
+    mv {envtmpdir}/.coverage {toxinidir}/.coverage-reports/.coverage.{env:COVERAGE_PREFIX:}{envname}
 deps =
     -rrequirements/requirements.txt
     -rrequirements/dev-requirements.txt
@@ -13,6 +24,32 @@ passenv =
     GI_TYPELIB_PATH
     INTEGRATION_CACHE

+#
+# These keys are not inherited by any other sections
+#
+setenv =
+    py{35,36,37}: COVERAGE_FILE = {envtmpdir}/.coverage
+whitelist_externals =
+    py{35,36,37}:
+        mv
+        mkdir
+
+#
+# Coverage reporting
+#
+[testenv:coverage]
+commands =
+    - coverage combine --rcfile={toxinidir}/.coveragerc {toxinidir}/.coverage-reports/
+    coverage report --rcfile={toxinidir}/.coveragerc -m
+deps =
+    -rrequirements/requirements.txt
+    -rrequirements/dev-requirements.txt
+setenv =
+    COVERAGE_FILE = {toxinidir}/.coverage-reports/.coverage
+
+#
+# Running linters
+#
 [testenv:lint]
 commands =
     pycodestyle
@@ -22,6 +59,9 @@ deps =
     -rrequirements/dev-requirements.txt
     -rrequirements/plugin-requirements.txt

+#
+# Building documentation
+#
 [testenv:docs]
 commands =
     make -C doc
...