Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (9)
......@@ -60,6 +60,9 @@ class Context():
# The directory where build sandboxes will be created
self.builddir = None
# Default root location for workspaces
self.workspacedir = None
# The local binary artifact cache directory
self.artifactdir = None
......@@ -161,10 +164,10 @@ class Context():
_yaml.node_validate(defaults, [
'sourcedir', 'builddir', 'artifactdir', 'logdir',
'scheduler', 'artifacts', 'logging', 'projects',
'cache'
'cache', 'workspacedir',
])
for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir']:
for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir', 'workspacedir']:
# Allow the ~ tilde expansion and any environment variables in
# path specification in the config files.
#
......
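For reference, the tilde/environment-variable expansion that the loop above applies to each configured directory boils down to the standard os.path helpers. A minimal sketch (the helper name is hypothetical, not the actual Context method):

import os

def expand_config_dir(path):
    # Allow '~' and environment variables such as ${XDG_CACHE_HOME}
    # in directory paths read from the user configuration.
    return os.path.normpath(os.path.expanduser(os.path.expandvars(path)))

# expand_config_dir('${XDG_CACHE_HOME}/buildstream/artifacts')
# -> e.g. '/home/user/.cache/buildstream/artifacts'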
......@@ -678,28 +678,37 @@ def workspace():
@click.option('--no-checkout', default=False, is_flag=True,
help="Do not checkout the source, only link to the given directory")
@click.option('--force', '-f', default=False, is_flag=True,
help="Overwrite files existing in checkout directory")
help="The workspace will be created even if the directory in which it will be created is not empty" +
"or if a workspace for that element already exists")
@click.option('--track', 'track_', default=False, is_flag=True,
help="Track and fetch new source references before checking out the workspace")
@click.argument('element',
type=click.Path(readable=False))
@click.argument('directory', type=click.Path(file_okay=False))
@click.option('--directory', type=click.Path(file_okay=False), default=None,
help="If only one element is given then the workspace will be created in the path given rather than" +
"being automatically generated")
@click.argument('elements', nargs=-1, type=click.Path(readable=False))
@click.pass_obj
def workspace_open(app, no_checkout, force, track_, element, directory):
def workspace_open(app, no_checkout, force, track_, directory, elements):
"""Open a workspace for manual source modification"""
if os.path.exists(directory):
if not os.path.isdir(directory):
click.echo("Checkout directory is not a directory: {}".format(directory), err=True)
directories = []
if directory is not None:
if len(elements) > 1:
click.echo("Directory option can only be used if a single element is given", err=True)
sys.exit(-1)
if os.path.exists(directory):
if not os.path.isdir(directory):
click.echo("Directory path is not a directory: {}".format(directory), err=True)
sys.exit(-1)
if not (no_checkout or force) and os.listdir(directory):
click.echo("Checkout directory is not empty: {}".format(directory), err=True)
sys.exit(-1)
if not (no_checkout or force) and os.listdir(directory):
click.echo("Directory path is not empty: {}".format(directory), err=True)
sys.exit(-1)
directories.append({'dir': directory, 'force': True})
else:
for element in elements:
element_dir = element[:-len('.bst')] if element.endswith('.bst') else element
directories.append({'dir': element_dir, 'force': False})
with app.initialized():
app.stream.workspace_open(element, directory,
app.stream.workspace_open(elements, directories,
no_checkout=no_checkout,
track_first=track_,
force=force)
......
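Condensing the new option handling above and the per-element path handling in Stream.workspace_open() further down: with --directory and exactly one element the given path is used directly, otherwise each element gets a directory named after it (minus the .bst suffix) under the configured default root. A rough sketch, with a hypothetical helper name:

import os

def resolve_workspace_dirs(elements, directory, workspacedir):
    # An explicit --directory is only allowed for a single element.
    if directory is not None:
        if len(elements) != 1:
            raise ValueError("Directory option can only be used if a single element is given")
        return [{'dir': directory, 'force': True}]
    # Otherwise derive one directory per element under the default root.
    dirs = []
    for element in elements:
        name = element[:-len('.bst')] if element.endswith('.bst') else element
        dirs.append({'dir': os.path.join(workspacedir, name), 'force': False})
    return dirs

# resolve_workspace_dirs(['hello.bst', 'base.bst'], None, '.')
# -> [{'dir': './hello', 'force': False}, {'dir': './base', 'force': False}]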
......@@ -423,9 +423,16 @@ class Stream():
else:
if location == '-':
with target.timed_activity("Creating tarball"):
with os.fdopen(sys.stdout.fileno(), 'wb') as fo:
with tarfile.open(fileobj=fo, mode="w|") as tf:
sandbox_vroot.export_to_tar(tf, '.')
# Save the stdout FD to restore later
saved_fd = os.dup(sys.stdout.fileno())
try:
with os.fdopen(sys.stdout.fileno(), 'wb') as fo:
with tarfile.open(fileobj=fo, mode="w|") as tf:
sandbox_vroot.export_to_tar(tf, '.')
finally:
# No matter what, restore stdout for further use
os.dup2(saved_fd, sys.stdout.fileno())
os.close(saved_fd)
else:
with target.timed_activity("Creating tarball '{}'"
.format(location)):
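The dup/dup2 dance in the hunk above is needed because os.fdopen() takes ownership of the underlying descriptor, so closing the wrapping file object also closes stdout; duplicating the descriptor first makes it possible to restore it afterwards. The pattern in isolation (the file name is a placeholder):

import os
import sys
import tarfile

saved_fd = os.dup(sys.stdout.fileno())                # keep a copy of the real stdout fd
try:
    with os.fdopen(sys.stdout.fileno(), 'wb') as fo:  # binary view over stdout
        with tarfile.open(fileobj=fo, mode='w|') as tf:
            tf.add('some-file')                       # stream the tarball to stdout
finally:
    os.dup2(saved_fd, sys.stdout.fileno())            # restore stdout for further use
    os.close(saved_fd)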
......@@ -447,70 +454,77 @@ class Stream():
# track_first (bool): Whether to track and fetch first
# force (bool): Whether to ignore contents in an existing directory
#
def workspace_open(self, target, directory, *,
def workspace_open(self, targets, directories, *,
no_checkout,
track_first,
force):
if track_first:
track_targets = (target,)
track_targets = targets
else:
track_targets = ()
elements, track_elements = self._load((target,), track_targets,
elements, track_elements = self._load(targets, track_targets,
selection=PipelineSelection.REDIRECT,
track_selection=PipelineSelection.REDIRECT)
target = elements[0]
directory = os.path.abspath(directory)
if not list(target.sources()):
build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
if not build_depends:
raise StreamError("The given element has no sources")
detail = "Try opening a workspace on one of its dependencies instead:\n"
detail += " \n".join(build_depends)
raise StreamError("The given element has no sources", detail=detail)
workspaces = self._context.get_workspaces()
# Check for workspace config
workspace = workspaces.get_workspace(target._get_full_name())
if workspace and not force:
raise StreamError("Workspace '{}' is already defined at: {}"
.format(target.name, workspace.get_absolute_path()))
# If we're going to checkout, we need at least a fetch,
# if we were asked to track first, we're going to fetch anyway.
#
if not no_checkout or track_first:
track_elements = []
if track_first:
track_elements = elements
self._fetch(elements, track_elements=track_elements)
if not no_checkout and target._get_consistency() != Consistency.CACHED:
raise StreamError("Could not stage uncached source. " +
"Use `--track` to track and " +
"fetch the latest version of the " +
"source.")
for target, directory_dict in zip(elements, directories):
if directory_dict['force']:
directory = directory_dict['dir']
else:
directory = os.path.abspath(os.path.join(self._context.workspacedir, directory_dict['dir']))
if not list(target.sources()):
build_depends = [x.name for x in target.dependencies(Scope.BUILD, recurse=False)]
if not build_depends:
raise StreamError("The given element has no sources")
detail = "Try opening a workspace on one of its dependencies instead:\n"
detail += " \n".join(build_depends)
raise StreamError("The given element has no sources", detail=detail)
# Check for workspace config
workspace = workspaces.get_workspace(target._get_full_name())
if workspace and not force:
raise StreamError("Workspace '{}' is already defined at: {}"
.format(target.name, workspace.get_absolute_path()))
# If we're going to checkout, we need at least a fetch,
# if we were asked to track first, we're going to fetch anyway.
#
if not no_checkout or track_first:
track_elements = []
if track_first:
track_elements = elements
self._fetch(elements, track_elements=track_elements)
if not no_checkout and target._get_consistency() != Consistency.CACHED:
raise StreamError("Could not stage uncached source. " +
"Use `--track` to track and " +
"fetch the latest version of the " +
"source.")
if workspace:
workspaces.delete_workspace(target._get_full_name())
workspaces.save_config()
shutil.rmtree(directory)
try:
os.makedirs(directory, exist_ok=True)
except OSError as e:
raise StreamError("Failed to create workspace directory: {}".format(e)) from e
if workspace:
workspaces.delete_workspace(target._get_full_name())
workspaces.save_config()
shutil.rmtree(directory)
try:
os.makedirs(directory, exist_ok=True)
except OSError as e:
raise StreamError("Failed to create workspace directory: {}".format(e)) from e
workspaces.create_workspace(target._get_full_name(), directory)
workspaces.create_workspace(target._get_full_name(), directory)
if not no_checkout:
with target.timed_activity("Staging sources to {}".format(directory)):
target._open_workspace()
if not no_checkout:
with target.timed_activity("Staging sources to {}".format(directory)):
target._open_workspace()
workspaces.save_config()
self._message(MessageType.INFO, "Saved workspace configuration")
# Save the workspace configuration as soon as each workspace is set up, so that
# if a later element fails before the configuration is saved, the workspaces that
# were successfully opened are still recorded
workspaces.save_config()
self._message(MessageType.INFO, "Added element {} to the workspace configuration"
.format(target._get_full_name()))
# workspace_close
#
......
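The key behavioural point in the reworked loop above is that workspaces.save_config() now runs once per successfully opened workspace rather than once at the end, so a failure on a later element does not throw away the earlier ones. As a generic illustration of that pattern (names hypothetical):

def open_all(pairs, open_one, save_config):
    # 'pairs' is a sequence of (target, directory); 'open_one' may raise for
    # any single element, 'save_config' persists the workspace configuration.
    for target, directory in pairs:
        open_one(target, directory)
        save_config()   # earlier successes stay recorded even if a later open fails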
......@@ -22,6 +22,9 @@ artifactdir: ${XDG_CACHE_HOME}/buildstream/artifacts
# Location to store build logs
logdir: ${XDG_CACHE_HOME}/buildstream/logs
# Default root location for workspaces, blank for no default set.
workspacedir: .
#
# Cache
#
......
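The new workspacedir key defaults to '.', so `bst workspace open hello.bst` run from the project root creates ./hello. Users can point it elsewhere in their configuration, and the test suite exercises the same knob through cli.configure(), roughly as follows (the path is illustrative):

# Override the default workspace root for a test invocation, then open a
# workspace without --directory so the configured root is used.
cli.configure({'workspacedir': '/tmp/my-workspaces'})
result = cli.run(project=project, args=['workspace', 'open', 'hello.bst'])
result.assert_success()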
......@@ -55,7 +55,7 @@ def test_open_workspace(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
workspace_dir = os.path.join(str(tmpdir), "workspace_hello")
result = cli.run(project=project, args=['workspace', 'open', '-f', 'hello.bst', workspace_dir])
result = cli.run(project=project, args=['workspace', 'open', '-f', '--directory', workspace_dir, 'hello.bst'])
result.assert_success()
result = cli.run(project=project, args=['workspace', 'list'])
......@@ -72,7 +72,7 @@ def test_make_change_in_workspace(cli, tmpdir, datafiles):
project = os.path.join(datafiles.dirname, datafiles.basename)
workspace_dir = os.path.join(str(tmpdir), "workspace_hello")
result = cli.run(project=project, args=['workspace', 'open', '-f', 'hello.bst', workspace_dir])
result = cli.run(project=project, args=['workspace', 'open', '-f', '--directory', workspace_dir, 'hello.bst'])
result.assert_success()
result = cli.run(project=project, args=['workspace', 'list'])
......
......@@ -44,7 +44,7 @@ def test_open_cross_junction_workspace(cli, tmpdir, datafiles):
workspace_dir = os.path.join(str(tmpdir), "workspace_hello_junction")
result = cli.run(project=project,
args=['workspace', 'open', 'hello-junction.bst:hello.bst', workspace_dir])
args=['workspace', 'open', '--directory', workspace_dir, 'hello-junction.bst:hello.bst'])
result.assert_success()
result = cli.run(project=project,
......
......@@ -128,7 +128,6 @@ def test_build_checkout_tarball(datafiles, cli):
assert os.path.join('.', 'usr', 'include', 'pony.h') in tar.getnames()
@pytest.mark.skip(reason="Capturing the binary output is causing a stacktrace")
@pytest.mark.datafiles(DATA_DIR)
def test_build_checkout_tarball_stdout(datafiles, cli):
project = os.path.join(datafiles.dirname, datafiles.basename)
......@@ -143,7 +142,7 @@ def test_build_checkout_tarball_stdout(datafiles, cli):
checkout_args = ['checkout', '--tar', 'target.bst', '-']
result = cli.run(project=project, args=checkout_args)
result = cli.run(project=project, args=checkout_args, binary_capture=True)
result.assert_success()
with open(tarball, 'wb') as f:
......@@ -510,7 +509,7 @@ def test_build_checkout_workspaced_junction(cli, tmpdir, datafiles):
# Now open a workspace on the junction
#
result = cli.run(project=project, args=['workspace', 'open', 'junction.bst', workspace])
result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, 'junction.bst'])
result.assert_success()
filename = os.path.join(workspace, 'files', 'etc-files', 'etc', 'animal.conf')
......
......@@ -47,7 +47,7 @@ def open_cross_junction(cli, tmpdir):
workspace = tmpdir.join("workspace")
element = 'sub.bst:data.bst'
args = ['workspace', 'open', element, str(workspace)]
args = ['workspace', 'open', '--directory', str(workspace), element]
result = cli.run(project=project, args=args)
result.assert_success()
......
......@@ -21,6 +21,7 @@
# Phillip Smyth <phillip.smyth@codethink.co.uk>
# Jonathan Maw <jonathan.maw@codethink.co.uk>
# Richard Maw <richard.maw@codethink.co.uk>
# William Salmon <will.salmon@codethink.co.uk>
#
import os
......@@ -43,72 +44,195 @@ DATA_DIR = os.path.join(
)
class WorkspaceCreater():
def __init__(self, cli, tmpdir, datafiles, project_path=None):
self.cli = cli
self.tmpdir = tmpdir
self.datafiles = datafiles
if not project_path:
project_path = os.path.join(datafiles.dirname, datafiles.basename)
else:
shutil.copytree(os.path.join(datafiles.dirname, datafiles.basename), project_path)
self.project_path = project_path
self.bin_files_path = os.path.join(project_path, 'files', 'bin-files')
self.workspace_cmd = os.path.join(self.project_path, 'workspace_cmd')
def create_workspace_element(self, kind, track, suffix='', workspace_dir=None,
element_attrs=None):
element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
element_path = os.path.join(self.project_path, 'elements')
if not workspace_dir:
workspace_dir = os.path.join(self.workspace_cmd, element_name[:-len('.bst')])
# Create our repo object of the given source type with
# the bin files, and then collect the initial ref.
repo = create_repo(kind, str(self.tmpdir))
ref = repo.create(self.bin_files_path)
if track:
ref = None
# Write out our test target
element = {
'kind': 'import',
'sources': [
repo.source_config(ref=ref)
]
}
if element_attrs:
element = {**element, **element_attrs}
_yaml.dump(element,
os.path.join(element_path,
element_name))
return element_name, element_path, workspace_dir
def create_workspace_elements(self, kinds, track, suffixs=None, workspace_dir_usr=None,
element_attrs=None):
results = []
if suffixs is None:
suffixs = ['', ] * len(kinds)
else:
if len(suffixs) != len(kinds):
raise "terable error"
for suffix, kind in zip(suffixs, kinds):
element_name, element_path, workspace_dir = \
self.create_workspace_element(kind, track, suffix, workspace_dir_usr,
element_attrs)
# Assert that there is no reference, a track & fetch is needed
state = self.cli.get_element_state(self.project_path, element_name)
if track:
assert state == 'no reference'
else:
assert state == 'fetch needed'
results.append((element_name, workspace_dir))
return results
def open_workspaces(self, kinds, track, suffixs=None, workspace_dir=None,
element_attrs=None):
results = self.create_workspace_elements(kinds, track, suffixs, workspace_dir,
element_attrs)
os.makedirs(self.workspace_cmd, exist_ok=True)
# Now open the workspace, this should have the effect of automatically
# tracking & fetching the source from the repo.
args = ['workspace', 'open']
if track:
args.append('--track')
if workspace_dir is not None:
assert len(results) == 1, "test logic error"
_, workspace_dir = results[0]
args.extend(['--directory', workspace_dir])
args.extend([element_name for element_name, workspace_dir_suffix in results])
result = self.cli.run(cwd=self.workspace_cmd, project=self.project_path, args=args)
result.assert_success()
for element_name, workspace_dir in results:
# Assert that we are now buildable because the source is
# now cached.
assert self.cli.get_element_state(self.project_path, element_name) == 'buildable'
# Check that the executable hello file is found in the workspace
filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
assert os.path.exists(filename)
return results
def open_workspace(cli, tmpdir, datafiles, kind, track, suffix='', workspace_dir=None,
project_path=None, element_attrs=None):
if not workspace_dir:
workspace_dir = os.path.join(str(tmpdir), 'workspace{}'.format(suffix))
if not project_path:
project_path = os.path.join(datafiles.dirname, datafiles.basename)
else:
shutil.copytree(os.path.join(datafiles.dirname, datafiles.basename), project_path)
bin_files_path = os.path.join(project_path, 'files', 'bin-files')
element_path = os.path.join(project_path, 'elements')
element_name = 'workspace-test-{}{}.bst'.format(kind, suffix)
workspace_object = WorkspaceCreater(cli, tmpdir, datafiles, project_path)
workspaces = workspace_object.open_workspaces((kind, ), track, (suffix, ), workspace_dir,
element_attrs)
assert len(workspaces) == 1
element_name, workspace = workspaces[0]
return element_name, workspace_object.project_path, workspace
# Create our repo object of the given source type with
# the bin files, and then collect the initial ref.
#
repo = create_repo(kind, str(tmpdir))
ref = repo.create(bin_files_path)
if track:
ref = None
# Write out our test target
element = {
'kind': 'import',
'sources': [
repo.source_config(ref=ref)
]
}
if element_attrs:
element = {**element, **element_attrs}
_yaml.dump(element,
os.path.join(element_path,
element_name))
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("kind", repo_kinds)
def test_open(cli, tmpdir, datafiles, kind):
open_workspace(cli, tmpdir, datafiles, kind, False)
# Assert that there is no reference, a track & fetch is needed
state = cli.get_element_state(project_path, element_name)
if track:
assert state == 'no reference'
else:
assert state == 'fetch needed'
@pytest.mark.datafiles(DATA_DIR)
def test_open_multi(cli, tmpdir, datafiles):
workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
workspaces = workspace_object.open_workspaces(repo_kinds, False)
for (elname, workspace), kind in zip(workspaces, repo_kinds):
assert kind in elname
workspace_lsdir = os.listdir(workspace)
if kind == 'git':
assert('.git' in workspace_lsdir)
elif kind == 'bzr':
assert('.bzr' in workspace_lsdir)
else:
assert not ('.git' in workspace_lsdir)
assert not ('.bzr' in workspace_lsdir)
@pytest.mark.datafiles(DATA_DIR)
def test_open_multi_with_directory(cli, tmpdir, datafiles):
workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
results = workspace_object.create_workspace_elements(repo_kinds, False, repo_kinds)
os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
# Now open the workspace, this should have the effect of automatically
# tracking & fetching the source from the repo.
args = ['workspace', 'open']
args.extend(['--directory', 'any/dir/should/fail'])
args.extend([element_name for element_name, workspace_dir_suffix in results])
result = workspace_object.cli.run(cwd=workspace_object.workspace_cmd, project=workspace_object.project_path,
args=args)
result.assert_main_error(ErrorDomain.ARTIFACT, None)
assert ("Directory option can only be used if a single element is given" in result.stderr)
@pytest.mark.datafiles(DATA_DIR)
def test_open_defaultlocation(cli, tmpdir, datafiles):
workspace_object = WorkspaceCreater(cli, tmpdir, datafiles)
((element_name, workspace_dir), ) = workspace_object.create_workspace_elements(['git'], False, ['git'])
os.makedirs(workspace_object.workspace_cmd, exist_ok=True)
# Now open the workspace, this should have the effect of automatically
# tracking & fetching the source from the repo.
args = ['workspace', 'open']
if track:
args.append('--track')
args.extend([element_name, workspace_dir])
result = cli.run(project=project_path, args=args)
args.append(element_name)
# The other tests pass cwd=workspace_object.workspace_cmd to the
# workspace_object.cli.run function. Here we instead set the default
# workspace location to workspace_object.workspace_cmd and run cli.run
# with no cwd option, so that it runs in the project directory.
cli.configure({'workspacedir': workspace_object.workspace_cmd})
result = workspace_object.cli.run(project=workspace_object.project_path,
args=args)
result.assert_success()
# Assert that we are now buildable because the source is
# now cached.
assert cli.get_element_state(project_path, element_name) == 'buildable'
assert cli.get_element_state(workspace_object.project_path, element_name) == 'buildable'
# Check that the executable hello file is found in the workspace
# Even though cli.run was not invoked with cwd=workspace_object.workspace_cmd,
# the workspace should still be created there because the 'workspacedir'
# configuration option was used.
filename = os.path.join(workspace_dir, 'usr', 'bin', 'hello')
assert os.path.exists(filename)
return (element_name, project_path, workspace_dir)
@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize("kind", repo_kinds)
def test_open(cli, tmpdir, datafiles, kind):
open_workspace(cli, tmpdir, datafiles, kind, False)
@pytest.mark.datafiles(DATA_DIR)
def test_open_bzr_customize(cli, tmpdir, datafiles):
......@@ -150,7 +274,7 @@ def test_open_force(cli, tmpdir, datafiles, kind):
# Now open the workspace again with --force, this should happily succeed
result = cli.run(project=project, args=[
'workspace', 'open', '--force', element_name, workspace
'workspace', 'open', '--force', '--directory', workspace, element_name
])
result.assert_success()
......@@ -165,7 +289,7 @@ def test_open_force_open(cli, tmpdir, datafiles, kind):
# Now open the workspace again with --force, this should happily succeed
result = cli.run(project=project, args=[
'workspace', 'open', '--force', element_name, workspace
'workspace', 'open', '--force', '--directory', workspace, element_name
])
result.assert_success()
......@@ -196,7 +320,7 @@ def test_open_force_different_workspace(cli, tmpdir, datafiles, kind):
# Now open the workspace again with --force, this should happily succeed
result = cli.run(project=project, args=[
'workspace', 'open', '--force', element_name2, workspace
'workspace', 'open', '--force', '--directory', workspace, element_name2
])
# Assert that the file in workspace 1 has been replaced
......@@ -504,7 +628,7 @@ def test_buildable_no_ref(cli, tmpdir, datafiles):
# Now open the workspace. We don't need to checkout the source though.
workspace = os.path.join(str(tmpdir), 'workspace-no-ref')
os.makedirs(workspace)
args = ['workspace', 'open', '--no-checkout', element_name, workspace]
args = ['workspace', 'open', '--no-checkout', '--directory', workspace, element_name]
result = cli.run(project=project, args=args)
result.assert_success()
......@@ -766,7 +890,7 @@ def test_list_supported_workspace(cli, tmpdir, datafiles, workspace_cfg, expecte
element_name))
# Make a change to the workspaces file
result = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
result.assert_success()
result = cli.run(project=project, args=['workspace', 'close', '--remove-dir', element_name])
result.assert_success()
......
......@@ -103,7 +103,7 @@ def test_yamlcache_used(cli, tmpdir, ref_storage, with_junction, move_project):
yc.put_from_key(prj, element_path, key, contents)
# Show that a variable has been added
result = cli.run(project=project, args=['show', '--format', '%{vars}', 'test.bst'])
result = cli.run(project=project, args=['show', '--deps', 'none', '--format', '%{vars}', 'test.bst'])
result.assert_success()
data = yaml.safe_load(result.output)
assert 'modified' in data
......@@ -135,7 +135,7 @@ def test_yamlcache_changed_file(cli, tmpdir, ref_storage, with_junction):
_yaml.load(element_path, copy_tree=False, project=prj, yaml_cache=yc)
# Show that a variable has been added
result = cli.run(project=project, args=['show', '--format', '%{vars}', 'test.bst'])
result = cli.run(project=project, args=['show', '--deps', 'none', '--format', '%{vars}', 'test.bst'])
result.assert_success()
data = yaml.safe_load(result.output)
assert 'modified' in data
......
......@@ -278,7 +278,7 @@ def test_workspace_visible(cli, tmpdir, datafiles):
# Open a workspace on our build failing element
#
res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
assert res.exit_code == 0
# Ensure the dependencies of our build failing element are built
......@@ -312,7 +312,7 @@ def test_sysroot_workspace_visible(cli, tmpdir, datafiles):
# Open a workspace on our build failing element
#
res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
assert res.exit_code == 0
# Ensure the dependencies of our build failing element are built
......
......@@ -23,7 +23,7 @@ def test_workspace_mount(cli, tmpdir, datafiles):
workspace = os.path.join(cli.directory, 'workspace')
element_name = 'workspace/workspace-mount.bst'
res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
assert res.exit_code == 0
res = cli.run(project=project, args=['build', element_name])
......@@ -39,7 +39,7 @@ def test_workspace_commanddir(cli, tmpdir, datafiles):
workspace = os.path.join(cli.directory, 'workspace')
element_name = 'workspace/workspace-commanddir.bst'
res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
assert res.exit_code == 0
res = cli.run(project=project, args=['build', element_name])
......@@ -75,7 +75,7 @@ def test_workspace_updated_dependency(cli, tmpdir, datafiles):
_yaml.dump(dependency, os.path.join(element_path, dep_name))
# First open the workspace
res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
assert res.exit_code == 0
# We build the workspaced element, so that we have an artifact
......@@ -130,7 +130,7 @@ def test_workspace_update_dependency_failed(cli, tmpdir, datafiles):
_yaml.dump(dependency, os.path.join(element_path, dep_name))
# First open the workspace
res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
assert res.exit_code == 0
# We build the workspaced element, so that we have an artifact
......@@ -205,7 +205,7 @@ def test_updated_dependency_nested(cli, tmpdir, datafiles):
_yaml.dump(dependency, os.path.join(element_path, dep_name))
# First open the workspace
res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
assert res.exit_code == 0
# We build the workspaced element, so that we have an artifact
......@@ -258,7 +258,7 @@ def test_incremental_configure_commands_run_only_once(cli, tmpdir, datafiles):
_yaml.dump(element, os.path.join(element_path, element_name))
# We open a workspace on the above element
res = cli.run(project=project, args=['workspace', 'open', element_name, workspace])
res = cli.run(project=project, args=['workspace', 'open', '--directory', workspace, element_name])
res.assert_success()
# Then we build, and check whether the configure step succeeded
......
......@@ -108,7 +108,7 @@ def test_filter_forbid_also_rdep(datafiles, cli):
def test_filter_workspace_open(datafiles, cli, tmpdir):
project = os.path.join(datafiles.dirname, datafiles.basename)
workspace_dir = os.path.join(tmpdir.dirname, tmpdir.basename, "workspace")
result = cli.run(project=project, args=['workspace', 'open', 'deps-permitted.bst', workspace_dir])
result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'deps-permitted.bst'])
result.assert_success()
assert os.path.exists(os.path.join(workspace_dir, "foo"))
assert os.path.exists(os.path.join(workspace_dir, "bar"))
......@@ -120,7 +120,7 @@ def test_filter_workspace_build(datafiles, cli, tmpdir):
project = os.path.join(datafiles.dirname, datafiles.basename)
tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
workspace_dir = os.path.join(tempdir, "workspace")
result = cli.run(project=project, args=['workspace', 'open', 'output-orphans.bst', workspace_dir])
result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
result.assert_success()
src = os.path.join(workspace_dir, "foo")
dst = os.path.join(workspace_dir, "quux")
......@@ -138,7 +138,7 @@ def test_filter_workspace_close(datafiles, cli, tmpdir):
project = os.path.join(datafiles.dirname, datafiles.basename)
tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
workspace_dir = os.path.join(tempdir, "workspace")
result = cli.run(project=project, args=['workspace', 'open', 'output-orphans.bst', workspace_dir])
result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
result.assert_success()
src = os.path.join(workspace_dir, "foo")
dst = os.path.join(workspace_dir, "quux")
......@@ -158,7 +158,7 @@ def test_filter_workspace_reset(datafiles, cli, tmpdir):
project = os.path.join(datafiles.dirname, datafiles.basename)
tempdir = os.path.join(tmpdir.dirname, tmpdir.basename)
workspace_dir = os.path.join(tempdir, "workspace")
result = cli.run(project=project, args=['workspace', 'open', 'output-orphans.bst', workspace_dir])
result = cli.run(project=project, args=['workspace', 'open', '--directory', workspace_dir, 'output-orphans.bst'])
result.assert_success()
src = os.path.join(workspace_dir, "foo")
dst = os.path.join(workspace_dir, "quux")
......
......@@ -17,7 +17,7 @@ import pytest
# CliRunner convenience API (click.testing module) does not support
# separation of stdout/stderr.
#
from _pytest.capture import MultiCapture, FDCapture
from _pytest.capture import MultiCapture, FDCapture, FDCaptureBinary
# Import the main cli entrypoint
from buildstream._frontend import cli as bst_cli
......@@ -234,9 +234,10 @@ class Cli():
# silent (bool): Whether to pass --no-verbose
# env (dict): Environment variables to temporarily set during the test
# args (list): A list of arguments to pass buildstream
# binary_capture (bool): Whether to capture the stdout/stderr as binary
#
def run(self, configure=True, project=None, silent=False, env=None,
cwd=None, options=None, args=None):
cwd=None, options=None, args=None, binary_capture=False):
if args is None:
args = []
if options is None:
......@@ -278,7 +279,7 @@ class Cli():
except ValueError:
sys.__stdout__ = open('/dev/stdout', 'w')
result = self.invoke(bst_cli, bst_args)
result = self.invoke(bst_cli, bst_args, binary_capture=binary_capture)
# Some informative stdout we can observe when anything fails
if self.verbose:
......@@ -295,7 +296,7 @@ class Cli():
return result
def invoke(self, cli, args=None, color=False, **extra):
def invoke(self, cli, args=None, color=False, binary_capture=False, **extra):
exc_info = None
exception = None
exit_code = 0
......@@ -305,8 +306,8 @@ class Cli():
old_stdin = sys.stdin
with open(os.devnull) as devnull:
sys.stdin = devnull
capture = MultiCapture(out=True, err=True, in_=False, Capture=FDCapture)
capture_kind = FDCaptureBinary if binary_capture else FDCapture
capture = MultiCapture(out=True, err=True, in_=False, Capture=capture_kind)
capture.start_capturing()
try:
......
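With binary_capture=True the Cli helper swaps FDCapture for FDCaptureBinary, so the captured output is raw bytes rather than decoded text; that is what lets the tarball-to-stdout test above be re-enabled. Typical usage, mirroring that test (the tarball path is a placeholder, and result.output is assumed to carry the captured bytes):

checkout_args = ['checkout', '--tar', 'target.bst', '-']
result = cli.run(project=project, args=checkout_args, binary_capture=True)
result.assert_success()

tarball = 'target.tar'                 # placeholder path
with open(tarball, 'wb') as f:
    f.write(result.output)             # raw tar stream captured from stdout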