diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py
index e5325934c3c7a8d8f21c5bcb73a431f6c241a61b..132f7f71a8d66f6457b4e1fe52afc7087916be6b 100644
--- a/src/buildstream/_frontend/cli.py
+++ b/src/buildstream/_frontend/cli.py
@@ -1135,10 +1135,10 @@ def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression,
               help='The dependency artifacts to pull')
 @click.option('--remote', '-r', default=None,
               help="The URL of the remote cache (defaults to the first configured cache)")
-@click.argument('elements', nargs=-1,
+@click.argument('artifacts', nargs=-1,
                 type=click.Path(readable=False))
 @click.pass_obj
-def artifact_pull(app, elements, deps, remote):
+def artifact_pull(app, artifacts, deps, remote):
     """Pull a built artifact from the configured remote artifact cache.
 
     Specifying no elements will result in pulling the default targets
@@ -1162,12 +1162,12 @@ def artifact_pull(app, elements, deps, remote):
     with app.initialized(session_name="Pull"):
         ignore_junction_targets = False
 
-        if not elements:
-            elements = app.project.get_default_targets()
+        if not artifacts:
+            artifacts = app.project.get_default_targets()
             # Junction elements cannot be pulled, exclude them from default targets
             ignore_junction_targets = True
 
-        app.stream.pull(elements, selection=deps, remote=remote,
+        app.stream.pull(artifacts, selection=deps, remote=remote,
                         ignore_junction_targets=ignore_junction_targets)
 
 
@@ -1180,10 +1180,10 @@ def artifact_pull(app, elements, deps, remote):
               help='The dependencies to push')
 @click.option('--remote', '-r', default=None,
               help="The URL of the remote cache (defaults to the first configured cache)")
-@click.argument('elements', nargs=-1,
+@click.argument('artifacts', nargs=-1,
                 type=click.Path(readable=False))
 @click.pass_obj
-def artifact_push(app, elements, deps, remote):
+def artifact_push(app, artifacts, deps, remote):
     """Push a built artifact to a remote artifact cache.
 
     Specifying no elements will result in pushing the default targets
@@ -1209,12 +1209,12 @@ def artifact_push(app, elements, deps, remote):
     with app.initialized(session_name="Push"):
         ignore_junction_targets = False
 
-        if not elements:
-            elements = app.project.get_default_targets()
+        if not artifacts:
+            artifacts = app.project.get_default_targets()
             # Junction elements cannot be pushed, exclude them from default targets
             ignore_junction_targets = True
 
-        app.stream.push(elements, selection=deps, remote=remote,
+        app.stream.push(artifacts, selection=deps, remote=remote,
                         ignore_junction_targets=ignore_junction_targets)
 
 
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index 9cff40868d77ad77f06ba69375935d96e6d2708e..6cbf3d8892b78ad2085e480f9277ea00961fffa8 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -478,13 +478,8 @@ class Project():
             #          2. The ArtifactCache.contains() method expects an Element
             #             and a key, not a ref.
             #
-            artifactdir = self._context.artifactdir
             artifacts = []
             for ref in targets:
-                if not os.path.exists(os.path.join(artifactdir, ref)):
-                    raise LoadError("{}\nis not present in the artifact cache ({})".format(ref, artifactdir),
-                                    LoadErrorReason.MISSING_FILE)
-
                 artifacts.append(ArtifactElement._new_from_artifact_ref(ref, self._context, task))
 
         ArtifactElement._clear_artifact_refs_cache()
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index f0e891dcf557dae5263d4a52c665642055fe387c..083dc1c43e00d5310f50463aa1151c7102af10d1 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -414,7 +414,8 @@ class Stream():
                                  selection=selection,
                                  ignore_junction_targets=ignore_junction_targets,
                                  use_artifact_config=use_config,
-                                 artifact_remote_url=remote)
+                                 artifact_remote_url=remote,
+                                 load_refs=True)
 
         if not self._artifacts.has_fetch_remotes():
             raise StreamError("No artifact caches available for pulling artifacts")
@@ -455,7 +456,8 @@ class Stream():
                                  selection=selection,
                                  ignore_junction_targets=ignore_junction_targets,
                                  use_artifact_config=use_config,
-                                 artifact_remote_url=remote)
+                                 artifact_remote_url=remote,
+                                 load_refs=True)
 
         if not self._artifacts.has_push_remotes():
             raise StreamError("No artifact caches available for pushing artifacts")
@@ -558,7 +560,8 @@ class Stream():
                 self._export_artifact(tar, location, compression, target, hardlinks, virdir)
             except AttributeError as e:
                 raise ArtifactError("Artifact reference '{}' seems to be invalid. "
-                                    "Note that an Element name can also be used.".format(artifact))from e
+                                    "Note that an Element name can also be used."
+                                    .format(artifact._element.get_artifact_name())) from e
         else:
             try:
                 with target._prepare_sandbox(scope=scope, directory=None,
diff --git a/tests/frontend/buildcheckout.py b/tests/frontend/buildcheckout.py
index 98b179b9e3171cc7b5573750cd4202473134b5b9..6281217b70389a4e82e2befe4298e0dd33821431 100644
--- a/tests/frontend/buildcheckout.py
+++ b/tests/frontend/buildcheckout.py
@@ -358,7 +358,7 @@ def test_build_checkout_invalid_ref(datafiles, cli):
     checkout_args = ['artifact', 'checkout', '--deps', 'none', '--tar', checkout, non_existent_artifact]
     result = cli.run(project=project, args=checkout_args)
 
-    assert "{}\nis not present in the artifact cache".format(non_existent_artifact) in result.stderr
+    assert "Artifact reference '{}' seems to be invalid".format(non_existent_artifact) in result.stderr
 
 
 @pytest.mark.datafiles(DATA_DIR)
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index fd49ff1efcfd85a30cc5cce352e9142553f3ea85..f978258f1a4dddeaac731f52a5e126b43a06824f 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -558,3 +558,46 @@ def test_pull_access_rights(cli, tmpdir, datafiles):
         st = os.lstat(os.path.join(checkout, 'usr/share/big-file'))
         assert stat.S_ISREG(st.st_mode)
         assert stat.S_IMODE(st.st_mode) == 0o0644
+
+
+# Tests `bst artifact pull $artifact_ref`
+@pytest.mark.datafiles(DATA_DIR)
+def test_pull_artifact(cli, tmpdir, datafiles):
+    project = str(datafiles)
+    element = 'target.bst'
+
+    # Configure a local cache
+    local_cache = os.path.join(str(tmpdir), 'cache')
+    cli.configure({'cachedir': local_cache})
+
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+
+        # First build the target element and push to the remote.
+        cli.configure({
+            'artifacts': {'url': share.repo, 'push': True}
+        })
+
+        result = cli.run(project=project, args=['build', element])
+        result.assert_success()
+
+        # Assert that the *artifact* is cached locally
+        cache_key = cli.get_element_key(project, element)
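+        # An artifact ref takes the form <project name>/<element name>/<cache key>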
+        artifact_ref = os.path.join('test', os.path.splitext(element)[0], cache_key)
+        assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact_ref))
+
+        # Assert that the target is shared (note that assert_shared will use the artifact name)
+        assert_shared(cli, share, project, element)
+
+        # Now that we've pushed, remove the local cache
+        shutil.rmtree(os.path.join(local_cache, 'artifacts'))
+
+        # Assert that nothing is cached locally anymore
+        assert not os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact_ref))
+
+        # Now try bst artifact pull
+        result = cli.run(project=project, args=['artifact', 'pull', artifact_ref])
+        result.assert_success()
+
+        # And assert that it's in the local cache again, without having built anything
+        assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact_ref))
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 9c3947c2a345cbdce6f92fee2b4183781877be4c..4f0fa3c196e80205ccba94717d3e66ac11ffd082 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -98,6 +98,60 @@ def test_push(cli, tmpdir, datafiles):
             assert_shared(cli, share1, project, 'target.bst')
             assert_shared(cli, share2, project, 'target.bst')
 
+
+# Tests `bst artifact push $artifact_ref`
+@pytest.mark.datafiles(DATA_DIR)
+def test_push_artifact(cli, tmpdir, datafiles):
+    project = str(datafiles)
+    element = 'target.bst'
+
+    # Configure a local cache
+    local_cache = os.path.join(str(tmpdir), 'cache')
+    cli.configure({'cachedir': local_cache})
+
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+
+        # First build it without the artifact cache configured
+        result = cli.run(project=project, args=['build', element])
+        result.assert_success()
+
+        # Assert that the *artifact* is cached locally
+        cache_key = cli.get_element_key(project, element)
+        artifact_ref = os.path.join('test', os.path.splitext(element)[0], cache_key)
+        assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact_ref))
+
+        # Configure artifact share
+        cli.configure({
+            #
+            # FIXME: This test hangs "sometimes" if we allow
+            #        concurrent push.
+            #
+            #        It's not too bad to ignore since we're
+            #        using the local artifact cache functionality
+            #        only, but it should probably be fixed.
+            #
+            'scheduler': {
+                'pushers': 1
+            },
+            'artifacts': {
+                'url': share.repo,
+                'push': True,
+            }
+        })
+
+        # Now try bst artifact push with the artifact ref
+        result = cli.run(project=project, args=[
+            'artifact', 'push', artifact_ref
+        ])
+        result.assert_success()
+
+        # And finally assert that the artifact is in the share
+        #
+        # Note that assert_shared tests that an element is shared by obtaining
+        # the artifact ref and asserting that the path exists in the share
+        assert_shared(cli, share, project, element)
+
+
 # Tests that:
 #
 #  * `bst artifact push` fails if the element is not cached locally
@@ -231,6 +285,54 @@ def test_push_all(cli, tmpdir, datafiles):
         assert_shared(cli, share, project, 'import-dev.bst')
         assert_shared(cli, share, project, 'compose-all.bst')
 
+# Tests that `bst artifact push --deps all $artifact_ref` fails
+@pytest.mark.datafiles(DATA_DIR)
+def test_push_artifacts_all_deps_fails(cli, tmpdir, datafiles):
+    project = str(datafiles)
+    element = 'checkout-deps.bst'
+
+    # Configure a local cache
+    local_cache = os.path.join(str(tmpdir), 'cache')
+    cli.configure({'cachedir': local_cache})
+
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+
+        # First build it without the artifact cache configured
+        result = cli.run(project=project, args=['build', element])
+        result.assert_success()
+
+        # Assert that the *artifact* is cached locally
+        cache_key = cli.get_element_key(project, element)
+        artifact_ref = os.path.join('test', os.path.splitext(element)[0], cache_key)
+        assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact_ref))
+
+        # Configure artifact share
+        cli.configure({
+            #
+            # FIXME: This test hangs "sometimes" if we allow
+            #        concurrent push.
+            #
+            #        It's not too bad to ignore since we're
+            #        using the local artifact cache functionality
+            #        only, but it should probably be fixed.
+            #
+            'scheduler': {
+                'pushers': 1
+            },
+            'artifacts': {
+                'url': share.repo,
+                'push': True,
+            }
+        })
+
+        # Now try bst artifact push with '--deps all'
+        result = cli.run(project=project, args=[
+            'artifact', 'push', '--deps', 'all', artifact_ref
+        ])
+        result.assert_main_error(ErrorDomain.STREAM, None)
+
+        assert "Error: '--deps all' is not supported for artifact refs" in result.stderr
+
 
 # Tests that `bst build` won't push artifacts to the cache it just pulled from.
 #