Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • willsalmon/buildstream
  • CumHoleZH/buildstream
  • tchaik/buildstream
  • DCotyPortfolio/buildstream
  • jesusoctavioas/buildstream
  • patrickmmartin/buildstream
  • franred/buildstream
  • tintou/buildstream
  • alatiera/buildstream
  • martinblanchard/buildstream
  • neverdie22042524/buildstream
  • Mattlk13/buildstream
  • PServers/buildstream
  • phamnghia610909/buildstream
  • chiaratolentino/buildstream
  • eysz7-x-x/buildstream
  • kerrick1/buildstream
  • matthew-yates/buildstream
  • twofeathers/buildstream
  • mhadjimichael/buildstream
  • pointswaves/buildstream
  • Mr.JackWilson/buildstream
  • Tw3akG33k/buildstream
  • AlexFazakas/buildstream
  • eruidfkiy/buildstream
  • clamotion2/buildstream
  • nanonyme/buildstream
  • wickyjaaa/buildstream
  • nmanchev/buildstream
  • bojorquez.ja/buildstream
  • mostynb/buildstream
  • highpit74/buildstream
  • Demo112/buildstream
  • ba2014sheer/buildstream
  • tonimadrino/buildstream
  • usuario2o/buildstream
  • Angelika123456/buildstream
  • neo355/buildstream
  • corentin-ferlay/buildstream
  • coldtom/buildstream
  • wifitvbox81/buildstream
  • 358253885/buildstream
  • seanborg/buildstream
  • SotK/buildstream
  • DouglasWinship/buildstream
  • karansthr97/buildstream
  • louib/buildstream
  • bwh-ct/buildstream
  • robjh/buildstream
  • we88c0de/buildstream
  • zhengxian5555/buildstream
51 results
Show changes
Commits on Source (13)
Showing
with 427 additions and 5 deletions
......@@ -84,6 +84,8 @@ source_dist:
- cd ../..
- mkdir -p coverage-linux/
- cp dist/buildstream/.coverage.* coverage-linux/coverage."${CI_JOB_NAME}"
except:
- schedules
artifacts:
paths:
- coverage-linux/
......@@ -125,6 +127,8 @@ tests-unix:
- cd ../..
- mkdir -p coverage-unix/
- cp dist/buildstream/.coverage.* coverage-unix/coverage.unix
except:
- schedules
artifacts:
paths:
- coverage-unix/
......@@ -147,10 +151,51 @@ docs:
- make BST_FORCE_SESSION_REBUILD=1 -C doc
- cd ../..
- mv dist/buildstream/doc/build/html public
except:
- schedules
artifacts:
paths:
- public/
.system-tests: &system-tests-template
stage: test
variables:
XDG_CACHE_HOME: /cache
bst_ext_url: git+https://gitlab.com/BuildStream/bst-external.git
bst_ext_ref: cf53cff64f4a6b3b3e2f0ac7ec13290511e9a137 # 0.2
fd_sdk_ref: 2472d5a178149593ddeab0feb2be1ddd0348eb75
expected_cache_key: ceee9b5a
expected_md5sum: 083966a5dc914d52e5245af3a06e933e
before_script:
- (cd dist && ./unpack.sh && cd buildstream && pip3 install .)
- pip3 install --user -e ${bst_ext_url}@${bst_ext_ref}#egg=bst_ext
- git clone https://gitlab.com/freedesktop-sdk/freedesktop-sdk.git
- git -C freedesktop-sdk checkout ${fd_sdk_ref}
#when:
#- schedules
system-tests:
<<: *system-tests-template
script:
- make -C freedesktop-sdk
- cache_key=$(bst -C sdk bst all.bst --deps none --format %{key})
- test ${cache_key} == ${expected_cache_key}
- bst -C sdk checkout --tar all.bst all.tar
- checksum=$(md5sum all.tar)
- test ${checksum} == ${expected_checksum}
system-tests-no-cache:
<<: *system-tests-template
script:
- sed -i '/artifacts:/,+1 d' freedesktop-sdk/bootstrap/project.conf
- sed -i '/artifacts:/,+1 d' freedesktop-sdk/sdk/project.conf
- make -C freedesktop-sdk
- cache_key=$(bst -C sdk bst all.bst --deps none --format %{key})
- test ${cache_key} == ${expected_cache_key}
- bst -C sdk checkout --tar all.bst all.tar
- checksum=$(md5sum all.tar)
- test ${checksum} == ${expected_checksum}
#####################################################
# Post stage #
......@@ -175,6 +220,8 @@ codequality:
--volume "$PWD":/code
--volume /var/run/docker.sock:/var/run/docker.sock
"registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
except:
- schedules
artifacts:
paths: [codeclimate.json]
......@@ -200,6 +247,8 @@ analysis:
radon raw -s -j buildstream > analysis/raw.json
radon raw -s buildstream
except:
- schedules
artifacts:
paths:
- analysis/
......@@ -224,6 +273,8 @@ coverage:
- tests-fedora-27
- tests-unix
- source_dist
except:
- schedules
# Deploy, only for merges which land on master branch.
#
......@@ -248,3 +299,5 @@ pages:
# See https://gitlab.com/gitlab-org/gitlab-ce/issues/35141
#
- master
except:
- schedules
......@@ -240,7 +240,6 @@ class CASCache(ArtifactCache):
except grpc.RpcError as e:
if e.code() != grpc.StatusCode.NOT_FOUND:
element.info("{} not found at remote {}".format(element._get_brief_display_key(), remote.spec.url))
raise
return False
......
......@@ -270,6 +270,10 @@ class App():
# Exit with the error
self._error_exit(e)
except RecursionError:
click.echo("RecursionError: Depency depth is too large. Maximum recursion depth exceeded.",
err=True)
sys.exit(-1)
else:
# No exceptions occurred, print session time and summary
......
......@@ -476,7 +476,7 @@ class Stream():
# Check for workspace config
workspace = workspaces.get_workspace(target._get_full_name())
if workspace:
if workspace and not force:
raise StreamError("Workspace '{}' is already defined at: {}"
.format(target.name, workspace.path))
......@@ -495,6 +495,10 @@ class Stream():
"fetch the latest version of the " +
"source.")
if workspace:
workspaces.delete_workspace(target._get_full_name())
workspaces.save_config()
shutil.rmtree(directory)
try:
os.makedirs(directory, exist_ok=True)
except OSError as e:
......
......@@ -23,7 +23,7 @@
# This version is bumped whenever enhancements are made
# to the `project.conf` format or the core element format.
#
BST_FORMAT_VERSION = 9
BST_FORMAT_VERSION = 10
# The base BuildStream artifact version
......
#
# Copyright Bloomberg Finance LP
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Ed Baunton <ebaunton1@bloomberg.net>
"""
remote - stage files from remote urls
=====================================
**Usage:**
.. code:: yaml
# Specify the remote source kind
kind: remote
# Optionally specify a relative staging directory
# directory: path/to/stage
# Optionally specify a relative staging filename.
# If not specified, the basename of the url will be used.
# filename: customfilename
# Specify the url. Using an alias defined in your project
# configuration is encouraged. 'bst track' will update the
# sha256sum in 'ref' to the downloaded file's sha256sum.
url: upstream:foo
# Specify the ref. It's a sha256sum of the file you download.
ref: 6c9f6f68a131ec6381da82f2bff978083ed7f4f7991d931bfa767b7965ebc94b
.. note::
The ``remote`` plugin is available since :ref:`format version 10 <project_format_version>`
"""
import os
from buildstream import SourceError, utils
from ._downloadablefilesource import DownloadableFileSource
class RemoteSource(DownloadableFileSource):
    # pylint: disable=attribute-defined-outside-init
    """Source plugin staging a single file downloaded from a remote url.

    Extends DownloadableFileSource with an optional 'filename' option that
    controls the name under which the downloaded file is staged.
    """

    def configure(self, node):
        super().configure(node)

        # When no explicit filename is configured, stage the file under
        # the basename of its url.
        default_name = os.path.basename(self.url)
        self.filename = self.node_get_member(node, str, 'filename', default_name)

        # The staged file must land directly in the staging directory;
        # reject any filename that contains a path separator.
        if os.sep in self.filename:
            raise SourceError('{}: filename parameter cannot contain directories'.format(self),
                              reason="filename-contains-directory")

        self.node_validate(node, DownloadableFileSource.COMMON_CONFIG_KEYS + ['filename'])

    def get_unique_key(self):
        # The staged filename affects build output, so it participates
        # in the cache key alongside the base class's key material.
        return super().get_unique_key() + [self.filename]

    def stage(self, directory):
        # Same as in local plugin, don't use hardlinks to stage sources, they
        # are not write protected in the sandbox.
        dest = os.path.join(directory, self.filename)
        with self.timed_activity("Staging remote file to {}".format(dest)):
            utils.safe_copy(self._get_mirror_file(), dest)
def setup():
    """Plugin entry point: hand the source implementation to BuildStream."""
    return RemoteSource
......@@ -802,6 +802,12 @@ def _process_list(srcdir, destdir, filelist, actionfunc, result,
os.mknod(destpath, file_stat.st_mode, file_stat.st_rdev)
os.chmod(destpath, file_stat.st_mode)
elif stat.S_ISFIFO(mode):
os.mkfifo(destpath, mode)
elif stat.S_ISSOCK(mode):
pass
else:
# Unsupported type.
raise UtilError('Cannot extract {} into staging-area. Unsupported type.'.format(srcpath))
......
......@@ -50,6 +50,7 @@ Sources
:maxdepth: 1
sources/local
sources/remote
sources/tar
sources/zip
sources/git
......
import os
import pytest
import sys
import shutil
import itertools
import pytest
from tests.testutils import cli
from buildstream import _yaml
from buildstream._exceptions import ErrorDomain, LoadErrorReason
......@@ -232,3 +233,58 @@ def test_fetched_junction(cli, tmpdir, datafiles, element_name):
results = result.output.strip().splitlines()
assert 'junction.bst:import-etc.bst-buildable' in results
###############################################################
#                   Testing recursion depth                   #
###############################################################
@pytest.mark.parametrize("dependency_depth", [100, 500, 1200])
def test_exceed_max_recursion_depth(cli, tmpdir, dependency_depth):
    """Check that deep dependency chains are handled gracefully.

    'bst show' on a chain deeper than the interpreter recursion limit must
    exit with a handled error (exit code -1), never an unhandled
    RecursionError traceback.
    """
    project_name = "recursion-test"
    path = str(tmpdir)
    project_path = os.path.join(path, project_name)

    def setup_test():
        """
        Creates a bst project with dependency_depth + 1 elements, each of which
        depends on the previous element to be created. Each element created
        is of type import and has an empty source file.
        """
        os.mkdir(project_path)

        result = cli.run(project=project_path, silent=True,
                         args=['init', '--project-name', project_name])
        result.assert_success()

        sourcefiles_path = os.path.join(project_path, "files")
        os.mkdir(sourcefiles_path)

        element_path = os.path.join(project_path, "elements")
        for i in range(0, dependency_depth + 1):
            element = {
                'kind': 'import',
                'sources': [{'kind': 'local',
                             'path': 'files/source{}'.format(str(i))}],
                'depends': ['element{}.bst'.format(str(i - 1))]
            }
            # The first element in the chain has nothing to depend on
            if i == 0:
                del element['depends']
            _yaml.dump(element, os.path.join(element_path, "element{}.bst".format(str(i))))

            # Each element imports an empty local source file
            source = os.path.join(sourcefiles_path, "source{}".format(str(i)))
            open(source, 'x').close()
            assert os.path.exists(source)

    setup_test()
    result = cli.run(project=project_path, silent=True,
                     args=['show', "element{}.bst".format(str(dependency_depth))])

    recursion_limit = sys.getrecursionlimit()
    if dependency_depth <= recursion_limit:
        result.assert_success()
    else:
        # Assert exception is thrown and handled
        assert not result.unhandled_exception
        assert result.exit_code == -1

    shutil.rmtree(project_path)
......@@ -123,6 +123,58 @@ def test_open_force(cli, tmpdir, datafiles, kind):
result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("kind", repo_kinds)
def test_open_force_open(cli, tmpdir, datafiles, kind):
    """Re-opening an already-open workspace with --force must succeed."""
    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False)

    # The first open must have created the workspace directory
    assert os.path.exists(workspace)

    # Opening the very same workspace a second time is fine with --force
    outcome = cli.run(project=project, args=[
        'workspace', 'open', '--force', element_name, workspace
    ])
    outcome.assert_success()
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("kind", repo_kinds)
def test_open_force_different_workspace(cli, tmpdir, datafiles, kind):
    """Forcing 'workspace open' onto a directory already used by another
    element must replace its contents with the other element's sources.

    NOTE(review): assumes open_workspace() takes a suffix argument
    ("-alpha"/"-beta") to disambiguate the two workspaces -- confirm
    against the helper's definition.
    """
    element_name, project, workspace = open_workspace(cli, tmpdir, datafiles, kind, False, "-alpha")

    # Assert the workspace dir exists
    assert os.path.exists(workspace)

    hello_path = os.path.join(workspace, 'usr', 'bin', 'hello')
    hello1_path = os.path.join(workspace, 'usr', 'bin', 'hello1')

    # Use a separate tmpdir for the second workspace so the two do not collide
    tmpdir = os.path.join(str(tmpdir), "-beta")

    # Modify workspace 1 by renaming one of its staged files
    shutil.move(hello_path, hello1_path)

    element_name2, project2, workspace2 = open_workspace(cli, tmpdir, datafiles, kind, False, "-beta")

    # Assert the workspace dir exists
    assert os.path.exists(workspace2)

    # Assert that workspace 1 contains the modified file
    assert os.path.exists(hello1_path)

    # Assert that workspace 2 contains the unmodified file
    assert os.path.exists(os.path.join(workspace2, 'usr', 'bin', 'hello'))

    # Now open the workspace again with --force, this should happily succeed
    result = cli.run(project=project, args=[
        'workspace', 'open', '--force', element_name2, workspace
    ])

    # Assert that the file in workspace 1 has been replaced
    # With the file from workspace 2
    assert os.path.exists(hello_path)
    assert not os.path.exists(hello1_path)

    result.assert_success()
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("kind", repo_kinds)
def test_close(cli, tmpdir, datafiles, kind):
......
import os
import pytest
from buildstream._exceptions import ErrorDomain
from buildstream import _yaml
from tests.testutils import cli
DATA_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'remote',
)
def generate_project(project_dir, tmpdir):
    """Write a minimal project.conf mapping the 'tmpdir' url alias to *tmpdir*."""
    conf = {
        'name': 'foo',
        'aliases': {
            'tmpdir': "file:///" + str(tmpdir)
        }
    }
    _yaml.dump(conf, os.path.join(project_dir, "project.conf"))
# Test that without ref, consistency is set appropriately.
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'no-ref'))
def test_no_ref(cli, tmpdir, datafiles):
    """An element whose remote source has no 'ref' reports 'no reference'."""
    project = os.path.join(datafiles.dirname, datafiles.basename)
    generate_project(project, tmpdir)
    assert cli.get_element_state(project, 'target.bst') == 'no reference'
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'missing-file'))
def test_missing_file(cli, tmpdir, datafiles):
    """Fetching a url that points at a nonexistent file must fail cleanly."""
    # Here we are doing a fetch on a file that doesn't exist. target.bst
    # refers to 'file' but that file is not present.
    project = os.path.join(datafiles.dirname, datafiles.basename)
    generate_project(project, tmpdir)

    # Try to fetch it
    fetch_result = cli.run(project=project, args=['fetch', 'target.bst'])

    # The overall stream fails, and the failing task is the source
    fetch_result.assert_main_error(ErrorDomain.STREAM, None)
    fetch_result.assert_task_error(ErrorDomain.SOURCE, None)
result.assert_task_error(ErrorDomain.SOURCE, None)
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'path-in-filename'))
def test_path_in_filename(cli, tmpdir, datafiles):
    """A 'filename' option containing a path separator must be rejected."""
    project = os.path.join(datafiles.dirname, datafiles.basename)
    generate_project(project, tmpdir)

    # Attempt the fetch; the bst file has a / in the filename param
    outcome = cli.run(project=project, args=['fetch', 'target.bst'])
    outcome.assert_main_error(ErrorDomain.SOURCE, "filename-contains-directory")
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'single-file'))
def test_simple_file_build(cli, tmpdir, datafiles):
    """Fetch, build and checkout an element with a single remote file."""
    project = os.path.join(datafiles.dirname, datafiles.basename)
    generate_project(project, tmpdir)
    checkoutdir = os.path.join(str(tmpdir), "checkout")

    # Run the full pipeline, asserting success at each step
    for command in (['fetch', 'target.bst'],
                    ['build', 'target.bst'],
                    ['checkout', 'target.bst', checkoutdir]):
        outcome = cli.run(project=project, args=command)
        outcome.assert_success()

    # Note that the url of the file in target.bst is actually /dir/file
    # but this tests confirms we take the basename
    assert os.path.exists(os.path.join(checkoutdir, 'file'))
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'single-file-custom-name'))
def test_simple_file_custom_name_build(cli, tmpdir, datafiles):
    """A custom 'filename' stages under that name, not the url basename."""
    project = os.path.join(datafiles.dirname, datafiles.basename)
    generate_project(project, tmpdir)
    checkoutdir = os.path.join(str(tmpdir), "checkout")

    # Run the full pipeline, asserting success at each step
    for command in (['fetch', 'target.bst'],
                    ['build', 'target.bst'],
                    ['checkout', 'target.bst', checkoutdir]):
        outcome = cli.run(project=project, args=command)
        outcome.assert_success()

    # Only the custom name should appear in the checkout
    assert not os.path.exists(os.path.join(checkoutdir, 'file'))
    assert os.path.exists(os.path.join(checkoutdir, 'custom-file'))
@pytest.mark.datafiles(os.path.join(DATA_DIR, 'unique-keys'))
def test_unique_key(cli, tmpdir, datafiles):
    '''This test confirms that the 'filename' parameter is honoured when it comes
    to generating a cache key for the source.
    '''
    project = os.path.join(datafiles.dirname, datafiles.basename)
    generate_project(project, tmpdir)

    # Both elements start out needing a fetch
    assert cli.get_element_state(project, 'target.bst') == "fetch needed"
    assert cli.get_element_state(project, 'target-custom.bst') == "fetch needed"

    # Try to fetch it
    result = cli.run(project=project, args=[
        'fetch', 'target.bst'
    ])

    # We should download the file only once: fetching one element
    # makes both elements buildable
    assert cli.get_element_state(project, 'target.bst') == 'buildable'
    assert cli.get_element_state(project, 'target-custom.bst') == 'buildable'

    # But the cache key is different because the 'filename' is different.
    assert cli.get_element_key(project, 'target.bst') != \
        cli.get_element_key(project, 'target-custom.bst')
kind: autotools
description: The kind of this element is irrelevant.
sources:
- kind: remote
url: tmpdir:/file
ref: abcdef
filename: filename
filecontent
kind: autotools
description: The kind of this element is irrelevant.
sources:
- kind: remote
url: tmpdir:/file
kind: import
description: test
sources:
- kind: remote
url: tmpdir:/dir/file
ref: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
filename: path/to/file
kind: import
description: test
sources:
- kind: remote
url: tmpdir:/dir/file
ref: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
filename: custom-file
kind: import
description: test
sources:
- kind: remote
url: tmpdir:/dir/file
ref: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855