Compare revisions
Commits on Source (11)
Showing with 1154 additions and 228 deletions
# Compiled python modules:
/**/__pycache__/
app/**/*.pyc
buildgrid/**/*.pyc
tests/**/*.pyc
# Distribution folder (setuptools):
dist/
# Python egg metadata (setuptools):
/*.egg-info
.eggs
# Testing related things:
.coverage
.coverage.*
.pylint.d/
.pytest_cache/
@@ -31,7 +31,7 @@ before_script:
- ${BGD} server start &
- sleep 1 # Allow server to boot
- ${BGD} bot --host=0.0.0.0 dummy &
- ${BGD} execute --host=0.0.0.0 request --wait-for-completion
- ${BGD} execute --host=0.0.0.0 request-dummy --wait-for-completion
tests-debian-stretch:
<<: *linux-tests
[MASTER]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loaded into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=
# Add files or directories to the blacklist. They should be base names, not
# paths.
#ignore=CVS,tests,doc
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=.*_pb2.py,.*_pb2_grpc.py
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint.
jobs=1
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# Specify a configuration file.
#rcfile=
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W"
# We have two groups of disabled messages:
#
# 1) Messages that are of no use to us
# This is either because we don't follow the convention
# (missing-docstring and protected-access come to mind), or because
# it's not very useful in CI (too-many-arguments, for example)
#
# 2) Messages that we would like to enable at some point
# We introduced linting quite late into the project, so there are
# some issues that just grew out of control. Resolving these would
# be nice, but too much work atm.
#
disable=#####################################
# Messages that are of no use to us #
#####################################
,
fixme,
missing-docstring,
no-self-use,
no-else-return,
protected-access,
too-few-public-methods,
too-many-arguments,
too-many-boolean-expressions,
too-many-branches,
too-many-instance-attributes,
too-many-lines,
too-many-locals,
too-many-nested-blocks,
too-many-public-methods,
too-many-statements,
too-many-return-statements,
too-many-ancestors,
#######################################################
# Messages that we would like to enable at some point #
#######################################################
# Overridden methods don't actually override but redefine
arguments-differ,
duplicate-code,
# Some invalid names are alright, we should configure pylint
# to accept them, and curb the others
invalid-name,
unused-argument,
###########################################################
# Messages that report warnings which should be addressed #
###########################################################
logging-format-interpolation,
cyclic-import,
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifiers separated by comma (,) or put this option
# multiple times (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[REPORTS]
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables error, warning, refactor, convention
# and statement, which respectively contain the number of messages of each
# category and the total number of statements analyzed. This is used by the
# global evaluation report (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
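# For example, a run with 1 error, 2 warnings, no refactor or convention
# messages and 100 statements scores 10.0 - ((5*1 + 2 + 0 + 0) / 100) * 10 = 9.3.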
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
output-format=colorized
# Tells whether to display a full report or only the messages
reports=no
# Activate the evaluation score.
score=yes
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete name of functions that never return. When checking for
# inconsistent-return-statements, if a never-returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=optparse.Values,sys.exit
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=__enter__
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=google.protobuf.any_pb2.Any
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=grpc,buildgrid._protos.*
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken into consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
[BASIC]
# Naming style matching correct argument names
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style
#argument-rgx=
# Naming style matching correct attribute names
attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style
#attr-rgx=
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,
bar,
baz,
toto,
tutu,
tata
# Naming style matching correct class attribute names
class-attribute-naming-style=any
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style
#class-attribute-rgx=
# Naming style matching correct class names
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-style
#class-rgx=
# Naming style matching correct constant names
const-naming-style=UPPER_CASE
# Regular expression matching correct constant names. Overrides const-naming-
# style
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names
function-naming-style=snake_case
# Regular expression matching correct function names. Overrides function-
# naming-style
#function-rgx=
# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_,e,f
# Include a hint for the correct naming format with invalid-name
include-naming-hint=no
# Naming style matching correct inline iteration names
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style
#inlinevar-rgx=
# Naming style matching correct method names
method-naming-style=snake_case
# Regular expression matching correct method names. Overrides method-naming-
# style
#method-rgx=
# Naming style matching correct module names
module-naming-style=snake_case
# Regular expression matching correct module names. Overrides module-naming-
# style
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
property-classes=abc.abstractproperty
# Naming style matching correct variable names
variable-naming-style=snake_case
# Regular expression matching correct variable names. Overrides variable-
# naming-style
#variable-rgx=
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
_cb
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored. Defaults to names
# with a leading underscore.
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins
[LOGGING]
# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging
[SPELLING]
# Limits count of emitted suggestions for spelling mistakes
max-spelling-suggestions=4
# Spelling dictionary name. Available dictionaries: none. To make it work,
# install the python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no
[MISCELLANEOUS]
# List of note tags to take into consideration, separated by a comma.
notes=FIXME,
XXX,
TODO
[SIMILARITIES]
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
# Minimum lines number of a similarity.
min-similarity-lines=4
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Maximum number of characters on a single line.
max-line-length=119
# Maximum number of lines in a module
max-module-lines=1000
# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,
dict-separator
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[IMPORTS]
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=optparse,tkinter.tix
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Maximum number of boolean expressions in a if statement
max-bool-expr=5
# Maximum number of branch for function / method body
max-branches=12
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of statements in function / method body
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
__new__,
setUp
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,
_fields,
_replace,
_source,
_make
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception
@@ -7,33 +7,17 @@ Feature Additions
We welcome contributions in the form of bug fixes or feature additions / enhancements. Please discuss with us before submitting anything, as we may well have some important context which could help guide your efforts.
Any major feature additions should be raised as a proposal on the `Mailing List <https://lists.buildgrid.build/cgi-bin/mailman/listinfo/buildgrid/>`_ to be discussed, and then eventually followed up with an issue here on gitlab. We recommend that you propose the feature in advance of commencing work. We are also on irc, but do not have our own dedicated channel - you can find us on #buildstream on GIMPNet and #bazel on freenode.
Any major feature additions should be raised as a proposal on the `Mailing List <https://lists.buildgrid.build/cgi-bin/mailman/listinfo/buildgrid/>`_ to be discussed, and then eventually followed up with an issue here on gitlab. We recommend that you propose the feature in advance of commencing work. We are also on irc - you can find us on #buildgrid on freenode.
The author of any patch is expected to take ownership of that code and to support it for a reasonable time frame. This means addressing any unforeseen side effects and quirks the feature may have introduced. More on this below in 'Granting Committer Access'.
Granting Committer Access
-------------------------
We'll hand out commit access to anyone who has successfully landed a single patch to the code base. Please request this via irc or the mailing list.
This of course relies on contributors being responsive and showing willingness to address problems after landing branches; given that, there should not be any problems here.
What we expect of committers here, in general, is basically to
escalate the review in cases of uncertainty:
* If the patch/branch is very trivial (an obvious few-line change, typo fixes, etc.), and you are confident of the change, there is no need for review.
* If the patch/branch is non-trivial, please obtain a review from another committer who is familiar with the area which the branch affects. An approval from someone who is not the patch author will be needed before any merge.
We don't have any detailed policy for "bad actors", but will of course handle things on a case-by-case basis - commit access should not result in commit wars or be used as a tool to subvert the project when disagreements arise; such incidents (if any) would surely lead to temporary suspension of commit rights.
Patch Submissions
-----------------
We will be running `trunk-based development <https://trunkbaseddevelopment.com>`_. The idea behind this is that merge requests to the trunk will be small and made often, thus making the review and merge process as fast as possible. We do not want to end up with a huge backlog of outstanding merge requests. If possible, it is preferred that merge requests address specific points and clearly outline what problem they are solving.
Branches must be submitted as merge requests on gitlab and should be associated with an issue report on gitlab, whenever possible. If it's a tiny change, we'll accept an MR without it being associated to a gitlab issue, but generally we strongly prefer an issue to be raised in advance. This is so that we can track the work that is currently in progress on the project - please see our Gitlab policy below.
Branches must be submitted as merge requests on gitlab and should be associated with an issue report on gitlab, whenever possible. If it's a small change, we'll accept an MR without it being associated to a gitlab issue, but generally we prefer an issue to be raised in advance. This is so that we can track the work that is currently in progress on the project - please see our Gitlab policy below.
Each commit should address a specific gitlab issue number in the commit message. This is really important for provenance reasons.
@@ -113,6 +97,21 @@ Remember that with python, the modules (python files) are also symbols
within their containing *package*, as such; modules which are entirely
private to BuildGrid are named as such, e.g. ``_roy.py``.
Granting Committer Access
-------------------------
We'll hand out commit access to anyone who has successfully landed a single patch to the code base. Please request this via irc or the mailing list.
This of course relies on contributors being responsive and showing willingness to address problems after landing branches; given that, there should not be any problems here.
What we expect of committers here, in general, is basically to
escalate the review in cases of uncertainty:
* If the patch/branch is very trivial (an obvious few-line change, typo fixes, etc.), and you are confident of the change, there is no need for review.
* If the patch/branch is non-trivial, please obtain a review from another committer who is familiar with the area which the branch affects. An approval from someone who is not the patch author will be needed before any merge.
We don't have any detailed policy for "bad actors", but will of course handle things on a case-by-case basis - commit access should not result in commit wars or be used as a tool to subvert the project when disagreements arise; such incidents (if any) would surely lead to temporary suspension of commit rights.
BuildGrid policy for use of Gitlab features
-------------------------------------------
@@ -38,7 +38,7 @@ In one terminal, start a server::
In another terminal, send a request for work::
bgd execute request
bgd execute request-dummy
The stage should show as `QUEUED` as it awaits a bot to pick up the work::
@@ -51,3 +51,35 @@ Create a bot session::
Show the work as completed::
bgd execute list
Instructions for a Simple Build
-------------------------------
This example covers a simple build. The user will upload a directory containing a C file and a command to the CAS. The bot will then fetch the uploaded directory and command, and run the command inside a temporary directory. The result will then be uploaded to the CAS and downloaded by the user. This is an early demo and still lacks a few features, such as symlink support and checking whether files already exist in the CAS before executing a command.
Create a new directory called `test-buildgrid/` and place in it the following C file, called `hello.c`::
#include <stdio.h>
int main()
{
printf("Hello, World!");
return 0;
}
Now start a BuildGrid server, passing it a directory it can write a CAS to::
bgd server start --cas disk --cas-cache disk --cas-disk-directory /path/to/empty/directory
Start the following bot session::
bgd bot temp-directory
Upload the directory containing the C file::
bgd cas upload-dir /path/to/test-buildgrid
Now we send an execution request to the bot with the name of the expected `output-file`, a boolean describing whether it is executable, the path to the directory we uploaded (in order to calculate the digest) and finally the command to run on the bot::
bgd execute command --output-file hello True /path/to/test-buildgrid -- gcc -Wall hello.c -o hello
The resulting executable should be returned to a new directory called `testing/`.
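The digests used throughout these examples pair a hash of a blob with its size in bytes. As a rough sketch of what gets computed for each uploaded file (assuming the SHA-256 digest function that the remote execution API defaults to; `create_digest` in `buildgrid.utils` plays this role in the codebase)::

    import hashlib

    from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

    def blob_digest(data):
        # A digest is the hex SHA-256 of the blob's bytes plus its size in bytes
        return remote_execution_pb2.Digest(hash=hashlib.sha256(data).hexdigest(),
                                           size_bytes=len(data))

    with open('test-buildgrid/hello.c', 'rb') as f:
        print(blob_digest(f.read()))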
__version__='0.0.2'
__version__ = '0.0.2'
import logging
def bgd_logger():
formatter = logging.Formatter(fmt='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
formatter = logging.Formatter(
fmt='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# Copyright (C) 2018 Bloomberg LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# <http://www.apache.org/licenses/LICENSE-2.0>
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import tempfile
import grpc
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
from buildgrid._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
from buildgrid.utils import read_file
from google.protobuf import any_pb2
def work_buildbox(context, lease):
logger = context.logger
action_any = lease.payload
action = remote_execution_pb2.Action()
action_any.Unpack(action)
cert_server = read_file(context.server_cert)
cert_client = read_file(context.client_cert)
key_client = read_file(context.client_key)
# Create client-side TLS credentials for the secure channel
credentials = grpc.ssl_channel_credentials(root_certificates=cert_server,
private_key=key_client,
certificate_chain=cert_client)
channel = grpc.secure_channel('{}:{}'.format(context.remote, context.port), credentials)
stub = bytestream_pb2_grpc.ByteStreamStub(channel)
remote_command = _buildstream_fetch_command(context.local_cas, stub, action.command_digest)
environment = dict((x.name, x.value) for x in remote_command.environment_variables)
logger.debug("command hash: {}".format(action.command_digest.hash))
logger.debug("vdir hash: {}".format(action.input_root_digest.hash))
logger.debug("\n{}".format(' '.join(remote_command.arguments)))
command = ['buildbox',
'--remote={}'.format('https://{}:{}'.format(context.remote, context.port)),
'--server-cert={}'.format(context.server_cert),
'--client-key={}'.format(context.client_key),
'--client-cert={}'.format(context.client_cert),
'--local={}'.format(context.local_cas),
'--chdir={}'.format(environment['PWD']),
context.fuse_dir]
command.extend(remote_command.arguments)
logger.debug(' '.join(command))
logger.debug("Input root digest:\n{}".format(action.input_root_digest))
logger.info("Launching process")
proc = subprocess.Popen(command,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
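# buildbox is handed the serialized input root digest on stdin and is
# expected to write the serialized digest of the output root to stdout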
std_send = action.input_root_digest.SerializeToString()
std_out, _ = proc.communicate(std_send)
output_root_digest = remote_execution_pb2.Digest()
output_root_digest.ParseFromString(std_out)
logger.debug("Output root digest: {}".format(output_root_digest))
output_file = remote_execution_pb2.OutputDirectory(tree_digest=output_root_digest)
action_result = remote_execution_pb2.ActionResult()
action_result.output_directories.extend([output_file])
action_result_any = any_pb2.Any()
action_result_any.Pack(action_result)
lease.result.CopyFrom(action_result_any)
return lease
def _buildstream_fetch_blob(remote, digest, out):
resource_name = os.path.join(digest.hash, str(digest.size_bytes))
request = bytestream_pb2.ReadRequest()
request.resource_name = resource_name
request.read_offset = 0
for response in remote.Read(request):
out.write(response.data)
out.flush()
assert digest.size_bytes == os.fstat(out.fileno()).st_size
def _buildstream_fetch_command(casdir, remote, digest):
with tempfile.NamedTemporaryFile(dir=os.path.join(casdir, 'tmp')) as out:
_buildstream_fetch_blob(remote, digest, out)
remote_command = remote_execution_pb2.Command()
with open(out.name, 'rb') as f:
remote_command.ParseFromString(f.read())
return remote_command
def _buildstream_fetch_action(casdir, remote, digest):
with tempfile.NamedTemporaryFile(dir=os.path.join(casdir, 'tmp')) as out:
_buildstream_fetch_blob(remote, digest, out)
remote_action = remote_execution_pb2.Action()
with open(out.name, 'rb') as f:
remote_action.ParseFromString(f.read())
return remote_action
# Copyright (C) 2018 Bloomberg LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# <http://www.apache.org/licenses/LICENSE-2.0>
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
import time
def work_dummy(context, lease):
""" Just returns lease after some random time
"""
time.sleep(random.randint(1, 5))
return lease
# Copyright (C) 2018 Bloomberg LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# <http://www.apache.org/licenses/LICENSE-2.0>
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import tempfile
from buildgrid.utils import read_file, create_digest, write_fetch_directory, parse_to_pb2_from_fetch
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
from buildgrid._protos.google.bytestream import bytestream_pb2_grpc
from google.protobuf import any_pb2
def work_temp_directory(context, lease):
""" Bot downloads directories and files into a temp directory,
then uploads results back to CAS
"""
instance_name = context.instance_name
stub_bytestream = bytestream_pb2_grpc.ByteStreamStub(context.channel)
action_digest = remote_execution_pb2.Digest()
lease.payload.Unpack(action_digest)
action = remote_execution_pb2.Action()
action = parse_to_pb2_from_fetch(action, stub_bytestream, action_digest, instance_name)
with tempfile.TemporaryDirectory() as temp_dir:
command = remote_execution_pb2.Command()
command = parse_to_pb2_from_fetch(command, stub_bytestream, action.command_digest, instance_name)
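# Stitch the fetched command into a single shell line that first changes into the temp directory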
arguments = "cd {} &&".format(temp_dir)
for argument in command.arguments:
arguments += " {}".format(argument)
context.logger.info(arguments)
write_fetch_directory(temp_dir, stub_bytestream, action.input_root_digest, instance_name)
process = subprocess.Popen(arguments,
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
# Wait for the command to finish before reading its output files below
process.communicate()
result = remote_execution_pb2.ActionResult()
requests = []
for output_file in command.output_files:
path = os.path.join(temp_dir, output_file)
chunk = read_file(path)
digest = create_digest(chunk)
result.output_files.extend([remote_execution_pb2.OutputFile(path=output_file,
digest=digest)])
requests.append(remote_execution_pb2.BatchUpdateBlobsRequest.Request(
digest=digest, data=chunk))
request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=instance_name,
requests=requests)
stub_cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(context.channel)
stub_cas.BatchUpdateBlobs(request)
result_any = any_pb2.Any()
result_any.Pack(result)
lease.result.CopyFrom(result_any)
return lease
@@ -24,24 +24,27 @@ will be attempted to be imported.
"""
import os
import sys
import click
import logging
import click
from . import _logging
CONTEXT_SETTINGS = dict(auto_envvar_prefix='BUILDGRID')
class Context(object):
class Context:
def __init__(self):
self.verbose = False
self.home = os.getcwd()
pass_context = click.make_pass_decorator(Context, ensure=True)
cmd_folder = os.path.abspath(os.path.join(os.path.dirname(__file__),
'commands'))
class BuildGridCLI(click.MultiCommand):
def list_commands(self, context):
@@ -60,9 +63,10 @@ class BuildGridCLI(click.MultiCommand):
except ModuleNotFoundError as e:
raise Exception(e)
except ImportError as e:
return
return None
return mod.cli
@click.command(cls=BuildGridCLI, context_settings=CONTEXT_SETTINGS)
@click.option('-v', '--verbose', is_flag=True,
help='Enables verbose mode.')
@@ -73,4 +77,3 @@ def cli(context, verbose):
context.verbose = verbose
if verbose:
logger.setLevel(logging.DEBUG)
@@ -22,28 +22,21 @@ Bot command
Create a bot interface and request work
"""
import asyncio
import click
import grpc
import logging
import os
import random
import subprocess
import tempfile
from pathlib import Path, PurePath
import click
import grpc
from buildgrid.bot import bot, bot_interface
from buildgrid.bot.bot_session import BotSession, Device, Worker
from buildgrid._exceptions import BotError
from ..bots import buildbox, dummy, temp_directory
from ..cli import pass_context
from buildgrid._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
from google.protobuf import any_pb2
@click.group(short_help = 'Create a bot client')
@click.group(short_help="Create a bot client")
@click.option('--parent', default='bgd_test')
@click.option('--port', default='50051')
@click.option('--host', default='localhost')
@@ -54,6 +47,7 @@ def cli(context, host, port, parent):
context.logger = logging.getLogger(__name__)
context.logger.info("Starting on port {}".format(port))
context.channel = channel
worker = Worker()
worker.add_device(Device())
@@ -63,31 +57,50 @@ def cli(context, host, port, parent):
context.bot_session = bot_session
@cli.command('dummy', short_help='Create a dummy bot session')
@cli.command('dummy', short_help='Create a dummy bot session which just returns the lease')
@pass_context
def dummy(context):
def run_dummy(context):
"""
Simple dummy client. Creates a session, accepts leases, does fake work and
updates the server.
"""
try:
b = bot.Bot(context.bot_session)
b.session(_work_dummy,
b.session(dummy.work_dummy,
context)
except KeyboardInterrupt:
pass
@cli.command('buildbox', short_help='Create a bot session with busybox')
@click.option('--fuse-dir', show_default = True, default=str(PurePath(Path.home(), 'fuse')))
@click.option('--local-cas', show_default = True, default=str(PurePath(Path.home(), 'cas')))
@click.option('--client-cert', show_default = True, default=str(PurePath(Path.home(), 'client.crt')))
@click.option('--client-key', show_default = True, default=str(PurePath(Path.home(), 'client.key')))
@click.option('--server-cert', show_default = True, default=str(PurePath(Path.home(), 'server.crt')))
@click.option('--port', show_default = True, default=11001)
@click.option('--remote', show_default = True, default='localhost')
@cli.command('temp-directory', short_help='Runs commands in temp directory and uploads results')
@click.option('--instance-name', default='testing')
@pass_context
def work_buildbox(context, remote, port, server_cert, client_key, client_cert, local_cas, fuse_dir):
def run_temp_directory(context, instance_name):
""" Downloads files and command from CAS and runs
in a temp directory, uploading result back to CAS
"""
context.instance_name = instance_name
try:
b = bot.Bot(context.bot_session)
b.session(temp_directory.work_temp_directory,
context)
except KeyboardInterrupt:
pass
@cli.command('buildbox', short_help="Create a bot session with buildbox")
@click.option('--fuse-dir', show_default=True, default=str(PurePath(Path.home(), 'fuse')))
@click.option('--local-cas', show_default=True, default=str(PurePath(Path.home(), 'cas')))
@click.option('--client-cert', show_default=True, default=str(PurePath(Path.home(), 'client.crt')))
@click.option('--client-key', show_default=True, default=str(PurePath(Path.home(), 'client.key')))
@click.option('--server-cert', show_default=True, default=str(PurePath(Path.home(), 'server.crt')))
@click.option('--port', show_default=True, default=11001)
@click.option('--remote', show_default=True, default='localhost')
@pass_context
def run_buildbox(context, remote, port, server_cert, client_key, client_cert, local_cas, fuse_dir):
"""
Uses BuildBox to run commands.
"""
@@ -103,104 +116,9 @@ def work_buildbox(context, remote, port, server_cert, client_key, client_cert, l
context.fuse_dir = fuse_dir
try:
b = bot.Bot(work=_work_buildbox,
bot_session=context.bot_session,
channel=context.channel,
parent=context.parent)
b.session(context.parent,
_work_buildbox,
context)
b = bot.Bot(context.bot_session)
b.session(work=buildbox.work_buildbox,
context=context)
except KeyboardInterrupt:
pass
async def _work_dummy(context, lease):
await asyncio.sleep(random.randint(1,5))
return lease
async def _work_buildbox(context, lease):
logger = context.logger
action_any = lease.payload
action = remote_execution_pb2.Action()
action_any.Unpack(action)
cert_server = _file_read(context.server_cert)
cert_client = _file_read(context.client_cert)
key_client = _file_read(context.client_key)
# create server credentials
credentials = grpc.ssl_channel_credentials(root_certificates=cert_server,
private_key=key_client,
certificate_chain=cert_client)
channel = grpc.secure_channel('{}:{}'.format(context.remote, context.port), credentials)
stub = bytestream_pb2_grpc.ByteStreamStub(channel)
remote_command = _fetch_command(context.local_cas, stub, action.command_digest)
environment = dict((x.name, x.value) for x in remote_command.environment_variables)
logger.debug("command hash: {}".format(action.command_digest.hash))
logger.debug("vdir hash: {}".format(action.input_root_digest.hash))
logger.debug("\n{}".format(' '.join(remote_command.arguments)))
command = ['buildbox',
'--remote={}'.format('https://{}:{}'.format(context.remote, context.port)),
'--server-cert={}'.format(context.server_cert),
'--client-key={}'.format(context.client_key),
'--client-cert={}'.format(context.client_cert),
'--local={}'.format(context.local_cas),
'--chdir={}'.format(environment['PWD']),
context.fuse_dir]
command.extend(remote_command.arguments)
logger.debug(' '.join(command))
logger.debug("Input root digest:\n{}".format(action.input_root_digest))
logger.info("Launching process")
proc = subprocess.Popen(command,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
std_send = action.input_root_digest.SerializeToString()
std_out, std_error = proc.communicate(std_send)
output_root_digest = remote_execution_pb2.Digest()
output_root_digest.ParseFromString(std_out)
logger.debug("Output root digest: {}".format(output_root_digest))
output_file = remote_execution_pb2.OutputDirectory(tree_digest = output_root_digest)
action_result = remote_execution_pb2.ActionResult()
action_result.output_directories.extend([output_file])
action_result_any = any_pb2.Any()
action_result_any.Pack(action_result)
lease.result.CopyFrom(action_result_any)
return lease
def _fetch_blob(remote, digest, out):
resource_name = os.path.join(digest.hash, str(digest.size_bytes))
request = bytestream_pb2.ReadRequest()
request.resource_name = resource_name
request.read_offset = 0
for response in remote.Read(request):
out.write(response.data)
out.flush()
assert digest.size_bytes == os.fstat(out.fileno()).st_size
def _fetch_command(casdir, remote, digest):
with tempfile.NamedTemporaryFile(dir=os.path.join(casdir, 'tmp')) as out:
_fetch_blob(remote, digest, out)
remote_command = remote_execution_pb2.Command()
with open(out.name, 'rb') as f:
remote_command.ParseFromString(f.read())
return remote_command
def _file_read(file_path):
with open(file_path, 'rb') as f:
return f.read()
# Copyright (C) 2018 Bloomberg LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# <http://www.apache.org/licenses/LICENSE-2.0>
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Execute command
=================
Request work to be executed and monitor status of jobs.
"""
import logging
import click
import grpc
from buildgrid.utils import merkle_maker, create_digest
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
from ..cli import pass_context
@click.group(short_help='Interact with the CAS')
@click.option('--port', default='50051')
@click.option('--host', default='localhost')
@pass_context
def cli(context, host, port):
context.logger = logging.getLogger(__name__)
context.logger.info("Starting on port {}".format(port))
context.channel = grpc.insecure_channel('{}:{}'.format(host, port))
context.port = port
@cli.command('upload-files', short_help='Upload files')
@click.argument('files', nargs=-1, type=click.File('rb'))
@click.option('--instance-name', default='testing')
@pass_context
def upload_files(context, files, instance_name):
stub = remote_execution_pb2_grpc.ContentAddressableStorageStub(context.channel)
requests = []
for file in files:
chunk = file.read()
requests.append(remote_execution_pb2.BatchUpdateBlobsRequest.Request(
digest=create_digest(chunk), data=chunk))
request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=instance_name,
requests=requests)
context.logger.info("Sending: {}".format(request))
response = stub.BatchUpdateBlobs(request)
context.logger.info("Response: {}".format(response))
@cli.command('upload-dir', short_help='Upload a directory')
@click.argument('directory')
@click.option('--instance-name', default='testing')
@pass_context
def upload_dir(context, directory, instance_name):
context.logger.info("Uploading directory to cas")
stub = remote_execution_pb2_grpc.ContentAddressableStorageStub(context.channel)
requests = []
for chunk, file_digest in merkle_maker(directory):
requests.append(remote_execution_pb2.BatchUpdateBlobsRequest.Request(
digest=file_digest, data=chunk))
request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=instance_name,
requests=requests)
context.logger.info("Request:\n{}".format(request))
response = stub.BatchUpdateBlobs(request)
context.logger.info("Response:\n{}".format(response))
@@ -22,20 +22,22 @@ Execute command
Request work to be executed and monitor status of jobs.
"""
import errno
import logging
import stat
import os
import click
import grpc
import logging
import sys
import time
from ..cli import pass_context
from buildgrid.utils import merkle_maker, create_digest, write_fetch_blob
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
from buildgrid._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import ExecuteOperationMetadata
from buildgrid._protos.google.bytestream import bytestream_pb2_grpc
from buildgrid._protos.google.longrunning import operations_pb2, operations_pb2_grpc
from google.protobuf import any_pb2
@click.group(short_help = "Simple execute client")
from ..cli import pass_context
@click.group(short_help='Simple execute client')
@click.option('--port', default='50051')
@click.option('--host', default='localhost')
@pass_context
@@ -46,22 +48,24 @@ def cli(context, host, port):
context.channel = grpc.insecure_channel('{}:{}'.format(host, port))
context.port = port
@cli.command('request', short_help='Send a dummy action')
@cli.command('request-dummy', short_help='Send a dummy action')
@click.option('--number', default=1)
@click.option('--instance-name', default='testing')
@click.option('--wait-for-completion', is_flag=True)
@click.option('--wait-for-completion', is_flag=True, help='Stream updates until jobs are completed')
@pass_context
def request(context, number, instance_name, wait_for_completion):
def request_dummy(context, number, instance_name, wait_for_completion):
action_digest = remote_execution_pb2.Digest()
context.logger.info("Sending execution request...\n")
stub = remote_execution_pb2_grpc.ExecutionStub(context.channel)
request = remote_execution_pb2.ExecuteRequest(instance_name = instance_name,
action_digest = action_digest,
skip_cache_lookup = True)
responses = []
for i in range(0, number):
request = remote_execution_pb2.ExecuteRequest(instance_name=instance_name,
action_digest=action_digest,
skip_cache_lookup=True)
responses = list()
for _ in range(0, number):
responses.append(stub.Execute(request))
for response in responses:
@@ -71,6 +75,7 @@ def request(context, number, instance_name, wait_for_completion):
else:
context.logger.info(next(response))
@cli.command('status', short_help='Get the status of an operation')
@click.argument('operation-name')
@pass_context
@@ -83,6 +88,7 @@ def operation_status(context, operation_name):
response = stub.GetOperation(request)
context.logger.info(response)
@cli.command('list', short_help='List operations')
@pass_context
def list_operations(context):
@@ -93,13 +99,14 @@ def list_operations(context):
response = stub.ListOperations(request)
if len(response.operations) < 1:
if not response.operations:
context.logger.warning("No operations to list")
return
for op in response.operations:
context.logger.info(op)
@cli.command('wait', short_help='Streams an operation until it is complete')
@click.argument('operation-name')
@pass_context
@@ -111,3 +118,86 @@ def wait_execution(context, operation_name):
for stream in response:
context.logger.info(stream)
@cli.command('command', short_help='Send a command to be executed')
@click.argument('input-root')
@click.argument('commands', nargs=-1)
@click.option('--output-file', nargs=2, type=(str, bool), multiple=True,
help='{Expected output file, is_executable flag}')
@click.option('--output-directory', default='testing', help='Output directory for output files')
@click.option('--instance-name', default='testing')
@pass_context
def command(context, input_root, commands, output_file, output_directory, instance_name):
stub = remote_execution_pb2_grpc.ExecutionStub(context.channel)
execute_command = remote_execution_pb2.Command()
for arg in commands:
execute_command.arguments.extend([arg])
output_executables = []
for file, is_executable in output_file:
execute_command.output_files.extend([file])
if is_executable:
output_executables.append(file)
command_digest = create_digest(execute_command.SerializeToString())
context.logger.info(command_digest)
# TODO: Check for missing blobs
digest = None
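# Walk the input tree; the final digest yielded by merkle_maker is taken as the input root digest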
for _, digest in merkle_maker(input_root):
pass
action = remote_execution_pb2.Action(command_digest=command_digest,
input_root_digest=digest,
do_not_cache=True)
action_digest = create_digest(action.SerializeToString())
context.logger.info("Sending execution request...\n")
requests = []
requests.append(remote_execution_pb2.BatchUpdateBlobsRequest.Request(
digest=command_digest, data=execute_command.SerializeToString()))
requests.append(remote_execution_pb2.BatchUpdateBlobsRequest.Request(
digest=action_digest, data=action.SerializeToString()))
request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name=instance_name,
requests=requests)
remote_execution_pb2_grpc.ContentAddressableStorageStub(context.channel).BatchUpdateBlobs(request)
request = remote_execution_pb2.ExecuteRequest(instance_name=instance_name,
action_digest=action_digest,
skip_cache_lookup=True)
response = stub.Execute(request)
stub = bytestream_pb2_grpc.ByteStreamStub(context.channel)
stream = None
for stream in response:
context.logger.info(stream)
execute_response = remote_execution_pb2.ExecuteResponse()
stream.response.Unpack(execute_response)
for output_file_response in execute_response.result.output_files:
path = os.path.join(output_directory, output_file_response.path)
if not os.path.exists(os.path.dirname(path)):
try:
os.makedirs(os.path.dirname(path))
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
with open(path, 'wb+') as f:
write_fetch_blob(f, stub, output_file_response.digest, instance_name)
if output_file_response.path in output_executables:
st = os.stat(path)
os.chmod(path, st.st_mode | stat.S_IXUSR)
@@ -22,11 +22,11 @@ Server command
Create a BuildGrid server.
"""
import asyncio
import click
import logging
import click
from buildgrid.server import build_grid_server
from buildgrid.server.action_cache import ActionCache
from buildgrid.server.cas.storage.disk import DiskStorage
@@ -36,34 +36,38 @@ from buildgrid.server.cas.storage.with_cache import WithCacheStorage
from ..cli import pass_context
@click.group(short_help = "Start local server")
_SIZE_PREFIXES = {'k': 2 ** 10, 'm': 2 ** 20, 'g': 2 ** 30, 't': 2 ** 40}
@click.group(short_help="Start local server")
@pass_context
def cli(context):
context.logger = logging.getLogger(__name__)
context.logger.info("BuildGrid server booting up")
@cli.command('start', short_help='Starts server')
@cli.command('start', short_help="Starts server")
@click.option('--port', default='50051')
@click.option('--max-cached-actions', type=int, default=50,
help='Maximum number of actions to keep in the ActionCache.')
help="Maximum number of actions to keep in the ActionCache.")
@click.option('--allow-update-action-result/--forbid-update-action-result',
'allow_uar', default=True,
help='Whether or not to allow clients to manually edit the action cache.')
help="Whether or not to allow clients to manually edit the action cache.")
@click.option('--cas',
type=click.Choice(('lru', 's3', 'disk', 'with-cache')),
help='CAS storage type to use.')
help="CAS storage type to use.")
@click.option('--cas-cache',
type=click.Choice(('lru', 's3', 'disk')),
help='For --cas=with-cache, the CAS storage to use as the cache.')
help="For --cas=with-cache, the CAS storage to use as the cache.")
@click.option('--cas-fallback',
type=click.Choice(('lru', 's3', 'disk')),
help='For --cas=with-cache, the CAS storage to use as the fallback.')
@click.option('--cas-lru-size', help='For --cas=lru, the LRU cache\'s memory limit.')
@click.option('--cas-s3-bucket', help='For --cas=s3, the bucket name.')
@click.option('--cas-s3-endpoint', help='For --cas=s3, the endpoint URI.')
help="For --cas=with-cache, the CAS storage to use as the fallback.")
@click.option('--cas-lru-size', help="For --cas=lru, the LRU cache's memory limit.")
@click.option('--cas-s3-bucket', help="For --cas=s3, the bucket name.")
@click.option('--cas-s3-endpoint', help="For --cas=s3, the endpoint URI.")
@click.option('--cas-disk-directory',
type=click.Path(file_okay=False, dir_okay=True, writable=True),
help='For --cas=disk, the folder to store CAS blobs in.')
help="For --cas=disk, the folder to store CAS blobs in.")
@pass_context
def start(context, port, max_cached_actions, allow_uar, cas, **cas_args):
context.logger.info("Starting on port {}".format(port))
@@ -91,11 +95,13 @@ def start(context, port, max_cached_actions, allow_uar, cas, **cas_args):
loop.run_until_complete(server.stop())
loop.close()
@cli.command('stop', short_help='Stops server')
@cli.command('stop', short_help="Stops server")
@pass_context
def stop(context):
context.logger.error("Not implemented yet")
def _make_cas_storage(context, cas_type, cas_args):
"""Returns the storage provider corresponding to the given `cas_type`,
or None if the provider cannot be created.
@@ -135,8 +141,9 @@ def _make_cas_storage(context, cas_type, cas_args):
return WithCacheStorage(cache, fallback)
elif cas_type is None:
return None
return None
_SIZE_PREFIXES = {'k': 2 ** 10, 'm': 2 ** 20, 'g': 2 ** 30, 't': 2 ** 40}
def _parse_size(size):
"""Convert a string containing a size in bytes (e.g. '2GB') to a number."""
size = size.lower()
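# A hypothetical completion of _parse_size, sketched against the
# _SIZE_PREFIXES table above (not necessarily the actual implementation):
def _parse_size_sketch(size):
    """Convert a size string such as '2GB' or '512k' to a number of bytes."""
    size = size.lower().strip()
    if size.endswith('b'):  # accept '2gb' as well as '2g'
        size = size[:-1]
    if size and size[-1] in _SIZE_PREFIXES:
        return int(size[:-1]) * _SIZE_PREFIXES[size[-1]]
    return int(size)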
@@ -22,39 +22,45 @@ Exceptions
from enum import Enum
""" Base BuildGrid Error class for internal exceptions.
"""
class BgdError(Exception):
"""
Base BuildGrid Error class for internal exceptions.
"""
def __init__(self, message, *, detail=None, domain=None, reason=None):
super().__init__(message)
""" Any additional detail and extra information
"""
# Additional detail and extra information
self.detail = detail
""" Domand and reason.
"""
# Domain and reason
self.domain = domain
self.reason = reason
class ErrorDomain(Enum):
SERVER = 1
SERVER = 1
EXECUTION = 2
WORKER = 3
BOT = 4
WORKER = 3
BOT = 4
class ServerError(BgdError):
def __init__(self, message, detail=None, reason=None):
super().__init__(message, detail=detail, domain=ErrorDomain.SERVER, reason=reason)
class ExecutionError(BgdError):
def __init__(self, message, detail=None, reason=None):
super().__init__(message, detail=detail, domain=ErrorDomain.EXECUTION, reason=reason)
class WorkerError(BgdError):
def __init__(self, message, detail=None, reason=None):
super().__init__(message, detail=detail, domain=ErrorDomain.WORKER, reason=reason)
class BotError(BgdError):
def __init__(self, message, detail=None, reason=None):
super().__init__(message, detail=detail, domain=ErrorDomain.BOT, reason=reason)
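# For illustration, raising and catching one of these errors might look like
# the following (the message and detail strings are made up):
#
#     try:
#         raise BotError('Work failed', detail='lease payload could not be unpacked')
#     except BgdError as e:
#         assert e.domain is ErrorDomain.BOT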
@@ -23,13 +23,8 @@ Creates a bot session.
"""
import asyncio
import collections
import logging
import time
from . import bot_interface, bot_session
from .bot_session import BotStatus, LeaseState
from .._exceptions import BotError
class Bot:
"""
@@ -50,18 +45,16 @@ class Bot:
try:
task = asyncio.ensure_future(self._update_bot_session())
loop.run_forever()
except KeyboardInterrupt:
pass
finally:
task.cancel()
loop.close()
async def _update_bot_session(self):
"""
Calls the server periodically to inform the server the client has not died.
"""
while True:
""" Calls the server periodically to inform the server the client
has not died.
"""
self._bot_session.update_bot_session()
await asyncio.sleep(self._update_period)
@@ -22,15 +22,14 @@ Bot Interface
Interface to grpc
"""
import logging
from buildgrid._protos.google.devtools.remoteworkers.v1test2 import bots_pb2, bots_pb2_grpc
from .._exceptions import BotError
class BotInterface:
""" Interface handles calls to the server.
"""
Interface handles calls to the server.
"""
def __init__(self, channel):
@@ -39,12 +38,12 @@ class BotInterface:
self._stub = bots_pb2_grpc.BotsStub(channel)
def create_bot_session(self, parent, bot_session):
request = bots_pb2.CreateBotSessionRequest(parent = parent,
bot_session = bot_session)
request = bots_pb2.CreateBotSessionRequest(parent=parent,
bot_session=bot_session)
return self._stub.CreateBotSession(request)
def update_bot_session(self, bot_session, update_mask = None):
request = bots_pb2.UpdateBotSessionRequest(name = bot_session.name,
bot_session = bot_session,
update_mask = update_mask)
def update_bot_session(self, bot_session, update_mask=None):
request = bots_pb2.UpdateBotSessionRequest(name=bot_session.name,
bot_session=bot_session,
update_mask=update_mask)
return self._stub.UpdateBotSession(request)
@@ -27,19 +27,39 @@ from enum import Enum
from buildgrid._protos.google.devtools.remoteworkers.v1test2 import bots_pb2, worker_pb2
class BotStatus(Enum):
# Default value.
BOT_STATUS_UNSPECIFIED = bots_pb2.BotStatus.Value('BOT_STATUS_UNSPECIFIED')
OK = bots_pb2.BotStatus.Value('OK')
UNHEALTHY = bots_pb2.BotStatus.Value('UNHEALTHY');
HOST_REBOOTING = bots_pb2.BotStatus.Value('HOST_REBOOTING')
BOT_TERMINATING = bots_pb2.BotStatus.Value('BOT_TERMINATING')
# The bot is healthy, and will accept leases as normal.
OK = bots_pb2.BotStatus.Value('OK')
# The bot is unhealthy and will not accept new leases.
UNHEALTHY = bots_pb2.BotStatus.Value('UNHEALTHY')
# The bot has been asked to reboot the host.
HOST_REBOOTING = bots_pb2.BotStatus.Value('HOST_REBOOTING')
# The bot has been asked to shut down.
BOT_TERMINATING = bots_pb2.BotStatus.Value('BOT_TERMINATING')
class LeaseState(Enum):
# Default value.
LEASE_STATE_UNSPECIFIED = bots_pb2.LeaseState.Value('LEASE_STATE_UNSPECIFIED')
PENDING = bots_pb2.LeaseState.Value('PENDING')
ACTIVE = bots_pb2.LeaseState.Value('ACTIVE')
COMPLETED = bots_pb2.LeaseState.Value('COMPLETED')
CANCELLED = bots_pb2.LeaseState.Value('CANCELLED')
# The server expects the bot to accept this lease.
PENDING = bots_pb2.LeaseState.Value('PENDING')
# The bot has accepted this lease.
ACTIVE = bots_pb2.LeaseState.Value('ACTIVE')
# The bot is no longer leased.
COMPLETED = bots_pb2.LeaseState.Value('COMPLETED')
# The bot should immediately release all resources associated with the lease.
CANCELLED = bots_pb2.LeaseState.Value('CANCELLED')
class BotSession:
@@ -54,6 +74,7 @@ class BotSession:
self.logger = logging.getLogger(__name__)
self._bot_id = '{}.{}'.format(parent, platform.node())
self._context = None
self._interface = interface
self._leases = {}
self._name = None
@@ -83,14 +104,15 @@ self._update_lease_from_server(lease)
self._update_lease_from_server(lease)
def update_bot_session(self):
self.logger.debug("Updating bot session: {}".format(self._bot_id))
session = self._interface.update_bot_session(self.get_pb2())
for lease in session.leases:
self._update_lease_from_server(lease)
for k, v in self._leases.items():
for k, v in list(self._leases.items()):
if v.state == LeaseState.COMPLETED.value:
del self._leases[k]
for lease in session.leases:
self._update_lease_from_server(lease)
def get_pb2(self):
leases = list(self._leases.values())
if not leases:
@@ -100,7 +122,7 @@ class BotSession:
status=self._status,
leases=leases,
bot_id=self._bot_id,
name = self._name)
name=self._name)
def lease_completed(self, lease):
lease.state = LeaseState.COMPLETED.value
@@ -110,19 +132,23 @@
"""
State machine for any received updates to the leases.
"""
## TODO: Compare with previous state of lease
lease_bot = self._leases.get(lease.id)
# TODO: Compare with previous state of lease
if lease.state == LeaseState.PENDING.value:
lease.state = LeaseState.ACTIVE.value
asyncio.ensure_future(self.create_work(lease))
self._leases[lease.id] = lease
self.update_bot_session()
asyncio.ensure_future(self.create_work(lease))
async def create_work(self, lease):
self.logger.debug("Work created: {}".format(lease.id))
lease = await self._work(self._context, lease)
loop = asyncio.get_event_loop()
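# Run the (potentially blocking) work function in the default executor so the event loop stays responsive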
lease = await loop.run_in_executor(None, self._work, self._context, lease)
self.logger.debug("Work complete: {}".format(lease.id))
self.lease_completed(lease)
class Worker:
def __init__(self, properties=None, configs=None):
self.properties = {}
@@ -167,6 +193,7 @@ class Worker:
return worker
class Device:
def __init__(self, properties=None):
""" Creates devices available to the worker
@@ -200,7 +227,6 @@ class Device:
def properties(self):
return self._properties
def get_pb2(self):
device = worker_pb2.Device(handle=self._name)
property_message = worker_pb2.Device.Property()