Skip to content
Snippets Groups Projects
Commit fee93a53 authored by Arber X's avatar Arber X :grinning:
Browse files

Update cas/test_client.py in order to thoroughly test getTree. Add gRPC limits to the getTree method.

parent a848b22f
No related branches found
No related tags found
Loading
......@@ -31,7 +31,7 @@ from buildgrid.utils import merkle_tree_maker
FILE_SIZE_THRESHOLD = 1 * 1024 * 1024
# Maximum size for a single gRPC request:
MAX_REQUEST_SIZE = 2 * 1024 * 1024
MAX_REQUEST_SIZE = 4 * 1024 * 1024
# Maximum number of elements per gRPC request:
MAX_REQUEST_COUNT = 500
......
......@@ -27,6 +27,8 @@ from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_p
from buildgrid.settings import HASH, HASH_LENGTH
from buildgrid.utils import get_hash_type
GRPC_MAX_REQUEST_SIZE = (4 * 1024 * 1024)
class ContentAddressableStorageInstance:
......@@ -72,37 +74,31 @@ class ContentAddressableStorageInstance:
return response
def get_tree(self, request, cache):
    """Walk the directory tree rooted at ``request.root_digest`` and build one page.

    Recursively fetches ``Directory`` messages from storage, appending each
    newly seen directory to the response until adding the next node would
    push the response past the gRPC message-size limit.  At that point the
    digest *hash* of the first unvisited node is recorded as
    ``next_page_token`` so the caller can resume the traversal by assigning
    it back to ``request.root_digest.hash`` on the follow-up call.

    Args:
        request: ``GetTreeRequest`` whose ``root_digest`` names the tree root.
        cache: dict mapping digest hash -> ``Directory``, shared across
            successive pages so already-fetched directories are neither
            re-read from storage nor re-sent to the client.

    Returns:
        ``GetTreeResponse`` holding this page's directories and, when the
        whole tree did not fit, a non-empty ``next_page_token``.
    """
    storage = self._storage

    response = re_pb2.GetTreeResponse()

    def _get_tree(node_digest):
        nonlocal cache, response
        if response.next_page_token:
            # The stop point for this page is already recorded: unwind the
            # recursion without clobbering the token (otherwise each later
            # sibling would overwrite it and its subtree would be lost).
            return
        # Leave ~100 bytes of headroom for the token and message framing.
        if response.ByteSize() + node_digest.size_bytes >= (GRPC_MAX_REQUEST_SIZE - 100):
            # Record only the hash: the service resumes by assigning this
            # value to request.root_digest.hash, so serializing the whole
            # Digest message here would break the next lookup.
            response.next_page_token = node_digest.hash
            return
        # NOTE(review): resuming from the stop node skips any still-unvisited
        # siblings of its ancestors — confirm pagination covers the full tree.
        hash_node = node_digest.hash
        if hash_node in cache:
            directory_from_digest = cache[hash_node]
        else:
            directory_from_digest = storage.get_message(node_digest, re_pb2.Directory)
            response.directories.extend([directory_from_digest])
            cache[hash_node] = directory_from_digest
        for directory in directory_from_digest.directories:
            _get_tree(directory.digest)

    _get_tree(request.root_digest)
    return response
......
......@@ -86,10 +86,12 @@ class ContentAddressableStorageService(remote_execution_pb2_grpc.ContentAddressa
def GetTree(self, request, context):
self.__logger.debug("GetTree request from [%s]", context.peer())
cache = {}
try:
instance = self._get_instance(request.instance_name)
while True:
response = instance.get_tree(request)
response = instance.get_tree(request, cache)
if not response.next_page_token:
# This is ugly, but handles the case in which there is only one iteration
# of the while loop. We need to manually raise stopiteration from this generator.
......@@ -98,7 +100,6 @@ class ContentAddressableStorageService(remote_execution_pb2_grpc.ContentAddressa
raise StopIteration # pylint: disable=stop-iteration-return
yield response
request.root_digest.hash = response.next_page_token
except InvalidArgumentError as e:
self.__logger.error(e)
......
File moved
#define HELLO_WORLD "Hello, World!"
......@@ -40,16 +40,31 @@ MESSAGES = [
]
DATA_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'data')
HELLO_DIR = os.path.join(DATA_DIR, 'hello')
HELLO2_DIR = os.path.join(HELLO_DIR, 'hello2')
HELLO3_DIR = os.path.join(HELLO_DIR, 'hello3')
HELLO4_DIR = os.path.join(HELLO3_DIR, 'hello4')
HELLO5_DIR = os.path.join(HELLO4_DIR, 'hello5')
FILES = [
(os.path.join(DATA_DIR, 'void'),),
(os.path.join(DATA_DIR, 'hello.cc'),),
(os.path.join(DATA_DIR, 'hello', 'hello.c'),
os.path.join(DATA_DIR, 'hello', 'hello.h'))]
os.path.join(DATA_DIR, 'hello', 'hello.sh')),
(os.path.join(HELLO2_DIR, 'hello.h'),),
(os.path.join(HELLO5_DIR, 'hello.h'),), ]
FOLDERS = [
(os.path.join(DATA_DIR, 'hello'),)]
(HELLO_DIR, HELLO2_DIR, HELLO3_DIR, HELLO4_DIR, HELLO5_DIR)]
DIRECTORIES = [
(os.path.join(DATA_DIR, 'hello'),),
(os.path.join(DATA_DIR, 'hello'), DATA_DIR)]
(HELLO_DIR,),
(DATA_DIR,),
(HELLO2_DIR,),
(HELLO3_DIR,),
(HELLO4_DIR,),
(HELLO5_DIR,), ]
@pytest.mark.parametrize('blobs', BLOBS)
......@@ -356,7 +371,6 @@ def test_download_directory(instance, folder_paths):
path = os.path.relpath(folder_path, start=DATA_DIR)
path = os.path.join(temp_folder, path)
paths.append(path)
run_in_subprocess(__test_download_directory,
server.remote, instance, digests, paths)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment