finn pushed to branch finn/78-capabilities-service at BuildGrid / buildgrid
Commits:
-
45908b7e
by Martin Blanchard at 2018-11-26T10:17:22Z
-
144df7b0
by Martin Blanchard at 2018-11-26T10:23:32Z
-
6c540071
by Martin Blanchard at 2018-11-26T10:30:08Z
-
96c6281e
by Martin Blanchard at 2018-11-26T10:30:08Z
-
bab83a4b
by Martin Blanchard at 2018-11-26T10:30:08Z
-
1f5d9210
by Martin Blanchard at 2018-11-26T10:30:08Z
-
e102e2d3
by Martin Blanchard at 2018-11-26T10:30:08Z
-
31533b69
by Martin Blanchard at 2018-11-26T10:30:08Z
-
cc1676ed
by Martin Blanchard at 2018-11-26T10:30:08Z
-
1a8dff34
by Martin Blanchard at 2018-11-26T10:30:08Z
-
66119b01
by Martin Blanchard at 2018-11-26T10:30:08Z
-
40a7a09a
by Martin Blanchard at 2018-11-26T10:30:08Z
-
fa2bc37c
by Finn at 2018-11-26T13:14:52Z
-
7a1548a4
by Finn at 2018-11-26T13:14:52Z
-
a9d2d5b0
by Finn at 2018-11-26T13:14:52Z
-
129867ca
by Finn at 2018-11-26T13:18:15Z
-
8ada4fc0
by Finn at 2018-11-26T13:18:15Z
-
41b8b8d3
by Finn at 2018-11-26T13:18:15Z
-
ba76d225
by Finn at 2018-11-26T13:18:39Z
-
76508c9d
by Finn at 2018-11-26T13:18:39Z
-
8e529da5
by Finn at 2018-11-26T13:18:39Z
-
5c29d26b
by Finn at 2018-11-26T13:19:27Z
-
f2408e33
by Finn at 2018-11-26T13:19:27Z
25 changed files:
- + buildgrid/_app/commands/cmd_capabilities.py
- buildgrid/_app/commands/cmd_server.py
- buildgrid/_enums.py
- + buildgrid/_protos/buildgrid/__init__.py
- + buildgrid/_protos/buildgrid/v2/__init__.py
- + buildgrid/_protos/buildgrid/v2/monitoring.proto
- + buildgrid/_protos/buildgrid/v2/monitoring_pb2.py
- + buildgrid/_protos/buildgrid/v2/monitoring_pb2_grpc.py
- + buildgrid/client/capabilities.py
- + buildgrid/server/_monitoring.py
- + buildgrid/server/capabilities/__init__.py
- + buildgrid/server/capabilities/instance.py
- + buildgrid/server/capabilities/service.py
- buildgrid/server/cas/instance.py
- buildgrid/server/cas/service.py
- buildgrid/server/cas/storage/storage_abc.py
- buildgrid/server/execution/instance.py
- buildgrid/server/instance.py
- buildgrid/utils.py
- tests/cas/test_services.py
- + tests/integration/capabilities_service.py
- tests/integration/operations_service.py
- tests/server_instance.py
- + tests/utils/capabilities.py
- + tests/utils/server.py
Changes:
1 |
+# Copyright (C) 2018 Bloomberg LP
|
|
2 |
+#
|
|
3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+# you may not use this file except in compliance with the License.
|
|
5 |
+# You may obtain a copy of the License at
|
|
6 |
+#
|
|
7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
+#
|
|
9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+# See the License for the specific language governing permissions and
|
|
13 |
+# limitations under the License.
|
|
14 |
+ |
|
15 |
+ |
|
16 |
+import sys
|
|
17 |
+from urllib.parse import urlparse
|
|
18 |
+ |
|
19 |
+import click
|
|
20 |
+import grpc
|
|
21 |
+ |
|
22 |
+from buildgrid.client.capabilities import CapabilitiesInterface
|
|
23 |
+ |
|
24 |
+from ..cli import pass_context
|
|
25 |
+ |
|
26 |
+ |
|
27 |
+@click.command(name='capabilities', short_help="Capabilities service.")
|
|
28 |
+@click.option('--remote', type=click.STRING, default='http://localhost:50051', show_default=True,
|
|
29 |
+ help="Remote execution server's URL (port defaults to 50051 if not specified).")
|
|
30 |
+@click.option('--client-key', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
31 |
+ help="Private client key for TLS (PEM-encoded)")
|
|
32 |
+@click.option('--client-cert', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
33 |
+ help="Public client certificate for TLS (PEM-encoded)")
|
|
34 |
+@click.option('--server-cert', type=click.Path(exists=True, dir_okay=False), default=None,
|
|
35 |
+ help="Public server certificate for TLS (PEM-encoded)")
|
|
36 |
+@click.option('--instance-name', type=click.STRING, default='main', show_default=True,
|
|
37 |
+ help="Targeted farm instance name.")
|
|
38 |
+@pass_context
|
|
39 |
+def cli(context, remote, instance_name, client_key, client_cert, server_cert):
|
|
40 |
+ click.echo("Getting capabilities...")
|
|
41 |
+ url = urlparse(remote)
|
|
42 |
+ |
|
43 |
+ remote = '{}:{}'.format(url.hostname, url.port or 50051)
|
|
44 |
+ instance_name = instance_name
|
|
45 |
+ |
|
46 |
+ if url.scheme == 'http':
|
|
47 |
+ channel = grpc.insecure_channel(remote)
|
|
48 |
+ else:
|
|
49 |
+ credentials = context.load_client_credentials(client_key, client_cert, server_cert)
|
|
50 |
+ if not credentials:
|
|
51 |
+ click.echo("ERROR: no TLS keys were specified and no defaults could be found.", err=True)
|
|
52 |
+ sys.exit(-1)
|
|
53 |
+ |
|
54 |
+ channel = grpc.secure_channel(remote, credentials)
|
|
55 |
+ |
|
56 |
+ interface = CapabilitiesInterface(channel)
|
|
57 |
+ response = interface.get_capabilities(instance_name)
|
|
58 |
+ click.echo(response)
|
... | ... | @@ -20,7 +20,6 @@ Server command |
20 | 20 |
Create a BuildGrid server.
|
21 | 21 |
"""
|
22 | 22 |
|
23 |
-import asyncio
|
|
24 | 23 |
import sys
|
25 | 24 |
|
26 | 25 |
import click
|
... | ... | @@ -51,18 +50,14 @@ def start(context, config): |
51 | 50 |
click.echo("ERROR: Could not parse config: {}.\n".format(str(e)), err=True)
|
52 | 51 |
sys.exit(-1)
|
53 | 52 |
|
54 |
- loop = asyncio.get_event_loop()
|
|
55 | 53 |
try:
|
56 | 54 |
server.start()
|
57 |
- loop.run_forever()
|
|
58 | 55 |
|
59 | 56 |
except KeyboardInterrupt:
|
60 | 57 |
pass
|
61 | 58 |
|
62 | 59 |
finally:
|
63 |
- click.echo("Stopping server")
|
|
64 | 60 |
server.stop()
|
65 |
- loop.close()
|
|
66 | 61 |
|
67 | 62 |
|
68 | 63 |
def _create_server_from_config(config):
|
... | ... | @@ -16,9 +16,13 @@ |
16 | 16 |
from enum import Enum
|
17 | 17 |
|
18 | 18 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
19 |
+from buildgrid._protos.buildgrid.v2 import monitoring_pb2
|
|
19 | 20 |
from buildgrid._protos.google.devtools.remoteworkers.v1test2 import bots_pb2
|
20 | 21 |
|
21 | 22 |
|
23 |
+# RWAPI enumerations
|
|
24 |
+# From google/devtools/remoteworkers/v1test2/bots.proto:
|
|
25 |
+ |
|
22 | 26 |
class BotStatus(Enum):
|
23 | 27 |
# Initially unknown state.
|
24 | 28 |
BOT_STATUS_UNSPECIFIED = bots_pb2.BotStatus.Value('BOT_STATUS_UNSPECIFIED')
|
... | ... | @@ -45,6 +49,9 @@ class LeaseState(Enum): |
45 | 49 |
CANCELLED = bots_pb2.LeaseState.Value('CANCELLED')
|
46 | 50 |
|
47 | 51 |
|
52 |
+# REAPI enumerations
|
|
53 |
+# From build/bazel/remote/execution/v2/remote_execution.proto:
|
|
54 |
+ |
|
48 | 55 |
class OperationStage(Enum):
|
49 | 56 |
# Initially unknown stage.
|
50 | 57 |
UNKNOWN = remote_execution_pb2.ExecuteOperationMetadata.Stage.Value('UNKNOWN')
|
... | ... | @@ -56,3 +63,41 @@ class OperationStage(Enum): |
56 | 63 |
EXECUTING = remote_execution_pb2.ExecuteOperationMetadata.Stage.Value('EXECUTING')
|
57 | 64 |
# Finished execution.
|
58 | 65 |
COMPLETED = remote_execution_pb2.ExecuteOperationMetadata.Stage.Value('COMPLETED')
|
66 |
+ |
|
67 |
+ |
|
68 |
+# Internal enumerations
|
|
69 |
+# From buildgrid.v2/monitoring.proto:
|
|
70 |
+ |
|
71 |
+class LogRecordLevel(Enum):
|
|
72 |
+ # Initially unknown level.
|
|
73 |
+ NOTSET = monitoring_pb2.LogRecord.Level.Value('NOTSET')
|
|
74 |
+ # Debug message severity level.
|
|
75 |
+ DEBUG = monitoring_pb2.LogRecord.Level.Value('DEBUG')
|
|
76 |
+ # Information message severity level.
|
|
77 |
+ INFO = monitoring_pb2.LogRecord.Level.Value('INFO')
|
|
78 |
+ # Warning message severity level.
|
|
79 |
+ WARNING = monitoring_pb2.LogRecord.Level.Value('WARNING')
|
|
80 |
+ # Error message severity level.
|
|
81 |
+ ERROR = monitoring_pb2.LogRecord.Level.Value('ERROR')
|
|
82 |
+ # Critical message severity level.
|
|
83 |
+ CRITICAL = monitoring_pb2.LogRecord.Level.Value('CRITICAL')
|
|
84 |
+ |
|
85 |
+ |
|
86 |
+class MetricRecordDomain(Enum):
|
|
87 |
+ # Initially unknown domain.
|
|
88 |
+ UNKNOWN = monitoring_pb2.MetricRecord.Domain.Value('UNKNOWN')
|
|
89 |
+ # A server state related metric.
|
|
90 |
+ STATE = monitoring_pb2.MetricRecord.Domain.Value('STATE')
|
|
91 |
+ # A build execution related metric.
|
|
92 |
+ BUILD = monitoring_pb2.MetricRecord.Domain.Value('BUILD')
|
|
93 |
+ |
|
94 |
+ |
|
95 |
+class MetricRecordType(Enum):
|
|
96 |
+ # Initially unknown type.
|
|
97 |
+ NONE = monitoring_pb2.MetricRecord.Type.Value('NONE')
|
|
98 |
+ # A metric for counting.
|
|
99 |
+ COUNTER = monitoring_pb2.MetricRecord.Type.Value('COUNTER')
|
|
100 |
+ # A metric for measuring a duration.
|
|
101 |
+ TIMER = monitoring_pb2.MetricRecord.Type.Value('TIMER')
|
|
102 |
+ # A metric with an arbitrary value.
|
|
103 |
+ GAUGE = monitoring_pb2.MetricRecord.Type.Value('GAUGE')
|
1 |
+// Copyright (C) 2018 Bloomberg LP
|
|
2 |
+//
|
|
3 |
+// Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+// you may not use this file except in compliance with the License.
|
|
5 |
+// You may obtain a copy of the License at
|
|
6 |
+//
|
|
7 |
+// <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
+//
|
|
9 |
+// Unless required by applicable law or agreed to in writing, software
|
|
10 |
+// distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+// See the License for the specific language governing permissions and
|
|
13 |
+// limitations under the License.
|
|
14 |
+ |
|
15 |
+syntax = "proto3";
|
|
16 |
+ |
|
17 |
+package buildgrid.v2;
|
|
18 |
+ |
|
19 |
+import "google/api/annotations.proto";
|
|
20 |
+import "google/protobuf/duration.proto";
|
|
21 |
+import "google/protobuf/timestamp.proto";
|
|
22 |
+ |
|
23 |
+message BusMessage {
|
|
24 |
+ // The position of this message in the bus stream.
|
|
25 |
+ int64 sequence_number = 1;
|
|
26 |
+ |
|
27 |
+ // The carried message.
|
|
28 |
+ oneof record {
|
|
29 |
+ LogRecord log_record = 2;
|
|
30 |
+ MetricRecord metric_record = 3;
|
|
31 |
+ }
|
|
32 |
+}
|
|
33 |
+ |
|
34 |
+message LogRecord {
|
|
35 |
+ // When the record has been created.
|
|
36 |
+ google.protobuf.Timestamp creation_timestamp = 1;
|
|
37 |
+ |
|
38 |
+ enum Level {
|
|
39 |
+ NOTSET = 0;
|
|
40 |
+ // Debug message severity level.
|
|
41 |
+ DEBUG = 1;
|
|
42 |
+ // Information message severity level.
|
|
43 |
+ INFO = 2;
|
|
44 |
+ // Warning message severity level.
|
|
45 |
+ WARNING = 3;
|
|
46 |
+ // Error message severity level.
|
|
47 |
+ ERROR = 4;
|
|
48 |
+ // Critical message severity level.
|
|
49 |
+ CRITICAL = 5;
|
|
50 |
+ }
|
|
51 |
+ |
|
52 |
+ // The domain name for the record.
|
|
53 |
+ string domain = 2;
|
|
54 |
+ |
|
55 |
+ // The severity level of the record.
|
|
56 |
+ Level level = 3;
|
|
57 |
+ |
|
58 |
+ // The human-readable record's message.
|
|
59 |
+ string message = 4;
|
|
60 |
+ |
|
61 |
+ // An optional list of additional metadata.
|
|
62 |
+ map<string, string> metadata = 5;
|
|
63 |
+}
|
|
64 |
+ |
|
65 |
+message MetricRecord {
|
|
66 |
+ // When the metric has been created.
|
|
67 |
+ google.protobuf.Timestamp creation_timestamp = 1;
|
|
68 |
+ |
|
69 |
+ enum Domain {
|
|
70 |
+ UNKNOWN = 0;
|
|
71 |
+ // A server state related metric.
|
|
72 |
+ STATE = 1;
|
|
73 |
+ // A build execution related metric.
|
|
74 |
+ BUILD = 2;
|
|
75 |
+ }
|
|
76 |
+ |
|
77 |
+ // The domain for the record.
|
|
78 |
+ Domain domain = 2;
|
|
79 |
+ |
|
80 |
+ enum Type {
|
|
81 |
+ NONE = 0;
|
|
82 |
+ // A metric for counting.
|
|
83 |
+ COUNTER = 1;
|
|
84 |
+ // A metric for measuring a duration.
|
|
85 |
+ TIMER = 2;
|
|
86 |
+ // A metric with an arbitrary value.
|
|
87 |
+ GAUGE = 3;
|
|
88 |
+ }
|
|
89 |
+ |
|
90 |
+ // The type of metric, see Type.
|
|
91 |
+ Type type = 3;
|
|
92 |
+ |
|
93 |
+ // The name identifying the metric.
|
|
94 |
+ string name = 4;
|
|
95 |
+ |
|
96 |
+ // The carried value, depending on the metric's type.
|
|
97 |
+ oneof data {
|
|
98 |
+ // Set for Type.COUNTER metrics.
|
|
99 |
+ int32 count = 5;
|
|
100 |
+ // Set for Type.TIMER metrics.
|
|
101 |
+ google.protobuf.Duration duration = 6;
|
|
102 |
+ // Set for Type.GAUGE metrics.
|
|
103 |
+ int32 value = 7;
|
|
104 |
+ }
|
|
105 |
+ |
|
106 |
+ // An optional list of additional metadata.
|
|
107 |
+ map<string, string> metadata = 8;
|
|
108 |
+}
|
1 |
+# Generated by the protocol buffer compiler. DO NOT EDIT!
|
|
2 |
+# source: buildgrid/v2/monitoring.proto
|
|
3 |
+ |
|
4 |
+import sys
|
|
5 |
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
|
|
6 |
+from google.protobuf import descriptor as _descriptor
|
|
7 |
+from google.protobuf import message as _message
|
|
8 |
+from google.protobuf import reflection as _reflection
|
|
9 |
+from google.protobuf import symbol_database as _symbol_database
|
|
10 |
+# @@protoc_insertion_point(imports)
|
|
11 |
+ |
|
12 |
+_sym_db = _symbol_database.Default()
|
|
13 |
+ |
|
14 |
+ |
|
15 |
+from buildgrid._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
|
|
16 |
+from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
|
|
17 |
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
|
|
18 |
+ |
|
19 |
+ |
|
20 |
+DESCRIPTOR = _descriptor.FileDescriptor(
|
|
21 |
+ name='buildgrid/v2/monitoring.proto',
|
|
22 |
+ package='buildgrid.v2',
|
|
23 |
+ syntax='proto3',
|
|
24 |
+ serialized_options=None,
|
|
25 |
+ serialized_pb=_b('\n\x1d\x62uildgrid/v2/monitoring.proto\x12\x0c\x62uildgrid.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x93\x01\n\nBusMessage\x12\x17\n\x0fsequence_number\x18\x01 \x01(\x03\x12-\n\nlog_record\x18\x02 \x01(\x0b\x32\x17.buildgrid.v2.LogRecordH\x00\x12\x33\n\rmetric_record\x18\x03 \x01(\x0b\x32\x1a.buildgrid.v2.MetricRecordH\x00\x42\x08\n\x06record\"\xcc\x02\n\tLogRecord\x12\x36\n\x12\x63reation_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0e\n\x06\x64omain\x18\x02 \x01(\t\x12,\n\x05level\x18\x03 \x01(\x0e\x32\x1d.buildgrid.v2.LogRecord.Level\x12\x0f\n\x07message\x18\x04 \x01(\t\x12\x37\n\x08metadata\x18\x05 \x03(\x0b\x32%.buildgrid.v2.LogRecord.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"N\n\x05Level\x12\n\n\x06NOTSET\x10\x00\x12\t\n\x05\x44\x45\x42UG\x10\x01\x12\x08\n\x04INFO\x10\x02\x12\x0b\n\x07WARNING\x10\x03\x12\t\n\x05\x45RROR\x10\x04\x12\x0c\n\x08\x43RITICAL\x10\x05\"\xde\x03\n\x0cMetricRecord\x12\x36\n\x12\x63reation_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\x06\x64omain\x18\x02 \x01(\x0e\x32!.buildgrid.v2.MetricRecord.Domain\x12-\n\x04type\x18\x03 \x01(\x0e\x32\x1f.buildgrid.v2.MetricRecord.Type\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x0f\n\x05\x63ount\x18\x05 \x01(\x05H\x00\x12-\n\x08\x64uration\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x12\x0f\n\x05value\x18\x07 \x01(\x05H\x00\x12:\n\x08metadata\x18\x08 \x03(\x0b\x32(.buildgrid.v2.MetricRecord.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"+\n\x06\x44omain\x12\x0b\n\x07UNKNOWN\x10\x00\x12\t\n\x05STATE\x10\x01\x12\t\n\x05\x42UILD\x10\x02\"3\n\x04Type\x12\x08\n\x04NONE\x10\x00\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\t\n\x05TIMER\x10\x02\x12\t\n\x05GAUGE\x10\x03\x42\x06\n\x04\x64\x61tab\x06proto3')
|
|
26 |
+ ,
|
|
27 |
+ dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,])
|
|
28 |
+ |
|
29 |
+ |
|
30 |
+ |
|
31 |
+_LOGRECORD_LEVEL = _descriptor.EnumDescriptor(
|
|
32 |
+ name='Level',
|
|
33 |
+ full_name='buildgrid.v2.LogRecord.Level',
|
|
34 |
+ filename=None,
|
|
35 |
+ file=DESCRIPTOR,
|
|
36 |
+ values=[
|
|
37 |
+ _descriptor.EnumValueDescriptor(
|
|
38 |
+ name='NOTSET', index=0, number=0,
|
|
39 |
+ serialized_options=None,
|
|
40 |
+ type=None),
|
|
41 |
+ _descriptor.EnumValueDescriptor(
|
|
42 |
+ name='DEBUG', index=1, number=1,
|
|
43 |
+ serialized_options=None,
|
|
44 |
+ type=None),
|
|
45 |
+ _descriptor.EnumValueDescriptor(
|
|
46 |
+ name='INFO', index=2, number=2,
|
|
47 |
+ serialized_options=None,
|
|
48 |
+ type=None),
|
|
49 |
+ _descriptor.EnumValueDescriptor(
|
|
50 |
+ name='WARNING', index=3, number=3,
|
|
51 |
+ serialized_options=None,
|
|
52 |
+ type=None),
|
|
53 |
+ _descriptor.EnumValueDescriptor(
|
|
54 |
+ name='ERROR', index=4, number=4,
|
|
55 |
+ serialized_options=None,
|
|
56 |
+ type=None),
|
|
57 |
+ _descriptor.EnumValueDescriptor(
|
|
58 |
+ name='CRITICAL', index=5, number=5,
|
|
59 |
+ serialized_options=None,
|
|
60 |
+ type=None),
|
|
61 |
+ ],
|
|
62 |
+ containing_type=None,
|
|
63 |
+ serialized_options=None,
|
|
64 |
+ serialized_start=547,
|
|
65 |
+ serialized_end=625,
|
|
66 |
+)
|
|
67 |
+_sym_db.RegisterEnumDescriptor(_LOGRECORD_LEVEL)
|
|
68 |
+ |
|
69 |
+_METRICRECORD_DOMAIN = _descriptor.EnumDescriptor(
|
|
70 |
+ name='Domain',
|
|
71 |
+ full_name='buildgrid.v2.MetricRecord.Domain',
|
|
72 |
+ filename=None,
|
|
73 |
+ file=DESCRIPTOR,
|
|
74 |
+ values=[
|
|
75 |
+ _descriptor.EnumValueDescriptor(
|
|
76 |
+ name='UNKNOWN', index=0, number=0,
|
|
77 |
+ serialized_options=None,
|
|
78 |
+ type=None),
|
|
79 |
+ _descriptor.EnumValueDescriptor(
|
|
80 |
+ name='STATE', index=1, number=1,
|
|
81 |
+ serialized_options=None,
|
|
82 |
+ type=None),
|
|
83 |
+ _descriptor.EnumValueDescriptor(
|
|
84 |
+ name='BUILD', index=2, number=2,
|
|
85 |
+ serialized_options=None,
|
|
86 |
+ type=None),
|
|
87 |
+ ],
|
|
88 |
+ containing_type=None,
|
|
89 |
+ serialized_options=None,
|
|
90 |
+ serialized_start=1002,
|
|
91 |
+ serialized_end=1045,
|
|
92 |
+)
|
|
93 |
+_sym_db.RegisterEnumDescriptor(_METRICRECORD_DOMAIN)
|
|
94 |
+ |
|
95 |
+_METRICRECORD_TYPE = _descriptor.EnumDescriptor(
|
|
96 |
+ name='Type',
|
|
97 |
+ full_name='buildgrid.v2.MetricRecord.Type',
|
|
98 |
+ filename=None,
|
|
99 |
+ file=DESCRIPTOR,
|
|
100 |
+ values=[
|
|
101 |
+ _descriptor.EnumValueDescriptor(
|
|
102 |
+ name='NONE', index=0, number=0,
|
|
103 |
+ serialized_options=None,
|
|
104 |
+ type=None),
|
|
105 |
+ _descriptor.EnumValueDescriptor(
|
|
106 |
+ name='COUNTER', index=1, number=1,
|
|
107 |
+ serialized_options=None,
|
|
108 |
+ type=None),
|
|
109 |
+ _descriptor.EnumValueDescriptor(
|
|
110 |
+ name='TIMER', index=2, number=2,
|
|
111 |
+ serialized_options=None,
|
|
112 |
+ type=None),
|
|
113 |
+ _descriptor.EnumValueDescriptor(
|
|
114 |
+ name='GAUGE', index=3, number=3,
|
|
115 |
+ serialized_options=None,
|
|
116 |
+ type=None),
|
|
117 |
+ ],
|
|
118 |
+ containing_type=None,
|
|
119 |
+ serialized_options=None,
|
|
120 |
+ serialized_start=1047,
|
|
121 |
+ serialized_end=1098,
|
|
122 |
+)
|
|
123 |
+_sym_db.RegisterEnumDescriptor(_METRICRECORD_TYPE)
|
|
124 |
+ |
|
125 |
+ |
|
126 |
+_BUSMESSAGE = _descriptor.Descriptor(
|
|
127 |
+ name='BusMessage',
|
|
128 |
+ full_name='buildgrid.v2.BusMessage',
|
|
129 |
+ filename=None,
|
|
130 |
+ file=DESCRIPTOR,
|
|
131 |
+ containing_type=None,
|
|
132 |
+ fields=[
|
|
133 |
+ _descriptor.FieldDescriptor(
|
|
134 |
+ name='sequence_number', full_name='buildgrid.v2.BusMessage.sequence_number', index=0,
|
|
135 |
+ number=1, type=3, cpp_type=2, label=1,
|
|
136 |
+ has_default_value=False, default_value=0,
|
|
137 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
138 |
+ is_extension=False, extension_scope=None,
|
|
139 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
140 |
+ _descriptor.FieldDescriptor(
|
|
141 |
+ name='log_record', full_name='buildgrid.v2.BusMessage.log_record', index=1,
|
|
142 |
+ number=2, type=11, cpp_type=10, label=1,
|
|
143 |
+ has_default_value=False, default_value=None,
|
|
144 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
145 |
+ is_extension=False, extension_scope=None,
|
|
146 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
147 |
+ _descriptor.FieldDescriptor(
|
|
148 |
+ name='metric_record', full_name='buildgrid.v2.BusMessage.metric_record', index=2,
|
|
149 |
+ number=3, type=11, cpp_type=10, label=1,
|
|
150 |
+ has_default_value=False, default_value=None,
|
|
151 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
152 |
+ is_extension=False, extension_scope=None,
|
|
153 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
154 |
+ ],
|
|
155 |
+ extensions=[
|
|
156 |
+ ],
|
|
157 |
+ nested_types=[],
|
|
158 |
+ enum_types=[
|
|
159 |
+ ],
|
|
160 |
+ serialized_options=None,
|
|
161 |
+ is_extendable=False,
|
|
162 |
+ syntax='proto3',
|
|
163 |
+ extension_ranges=[],
|
|
164 |
+ oneofs=[
|
|
165 |
+ _descriptor.OneofDescriptor(
|
|
166 |
+ name='record', full_name='buildgrid.v2.BusMessage.record',
|
|
167 |
+ index=0, containing_type=None, fields=[]),
|
|
168 |
+ ],
|
|
169 |
+ serialized_start=143,
|
|
170 |
+ serialized_end=290,
|
|
171 |
+)
|
|
172 |
+ |
|
173 |
+ |
|
174 |
+_LOGRECORD_METADATAENTRY = _descriptor.Descriptor(
|
|
175 |
+ name='MetadataEntry',
|
|
176 |
+ full_name='buildgrid.v2.LogRecord.MetadataEntry',
|
|
177 |
+ filename=None,
|
|
178 |
+ file=DESCRIPTOR,
|
|
179 |
+ containing_type=None,
|
|
180 |
+ fields=[
|
|
181 |
+ _descriptor.FieldDescriptor(
|
|
182 |
+ name='key', full_name='buildgrid.v2.LogRecord.MetadataEntry.key', index=0,
|
|
183 |
+ number=1, type=9, cpp_type=9, label=1,
|
|
184 |
+ has_default_value=False, default_value=_b("").decode('utf-8'),
|
|
185 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
186 |
+ is_extension=False, extension_scope=None,
|
|
187 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
188 |
+ _descriptor.FieldDescriptor(
|
|
189 |
+ name='value', full_name='buildgrid.v2.LogRecord.MetadataEntry.value', index=1,
|
|
190 |
+ number=2, type=9, cpp_type=9, label=1,
|
|
191 |
+ has_default_value=False, default_value=_b("").decode('utf-8'),
|
|
192 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
193 |
+ is_extension=False, extension_scope=None,
|
|
194 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
195 |
+ ],
|
|
196 |
+ extensions=[
|
|
197 |
+ ],
|
|
198 |
+ nested_types=[],
|
|
199 |
+ enum_types=[
|
|
200 |
+ ],
|
|
201 |
+ serialized_options=_b('8\001'),
|
|
202 |
+ is_extendable=False,
|
|
203 |
+ syntax='proto3',
|
|
204 |
+ extension_ranges=[],
|
|
205 |
+ oneofs=[
|
|
206 |
+ ],
|
|
207 |
+ serialized_start=498,
|
|
208 |
+ serialized_end=545,
|
|
209 |
+)
|
|
210 |
+ |
|
211 |
+_LOGRECORD = _descriptor.Descriptor(
|
|
212 |
+ name='LogRecord',
|
|
213 |
+ full_name='buildgrid.v2.LogRecord',
|
|
214 |
+ filename=None,
|
|
215 |
+ file=DESCRIPTOR,
|
|
216 |
+ containing_type=None,
|
|
217 |
+ fields=[
|
|
218 |
+ _descriptor.FieldDescriptor(
|
|
219 |
+ name='creation_timestamp', full_name='buildgrid.v2.LogRecord.creation_timestamp', index=0,
|
|
220 |
+ number=1, type=11, cpp_type=10, label=1,
|
|
221 |
+ has_default_value=False, default_value=None,
|
|
222 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
223 |
+ is_extension=False, extension_scope=None,
|
|
224 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
225 |
+ _descriptor.FieldDescriptor(
|
|
226 |
+ name='domain', full_name='buildgrid.v2.LogRecord.domain', index=1,
|
|
227 |
+ number=2, type=9, cpp_type=9, label=1,
|
|
228 |
+ has_default_value=False, default_value=_b("").decode('utf-8'),
|
|
229 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
230 |
+ is_extension=False, extension_scope=None,
|
|
231 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
232 |
+ _descriptor.FieldDescriptor(
|
|
233 |
+ name='level', full_name='buildgrid.v2.LogRecord.level', index=2,
|
|
234 |
+ number=3, type=14, cpp_type=8, label=1,
|
|
235 |
+ has_default_value=False, default_value=0,
|
|
236 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
237 |
+ is_extension=False, extension_scope=None,
|
|
238 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
239 |
+ _descriptor.FieldDescriptor(
|
|
240 |
+ name='message', full_name='buildgrid.v2.LogRecord.message', index=3,
|
|
241 |
+ number=4, type=9, cpp_type=9, label=1,
|
|
242 |
+ has_default_value=False, default_value=_b("").decode('utf-8'),
|
|
243 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
244 |
+ is_extension=False, extension_scope=None,
|
|
245 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
246 |
+ _descriptor.FieldDescriptor(
|
|
247 |
+ name='metadata', full_name='buildgrid.v2.LogRecord.metadata', index=4,
|
|
248 |
+ number=5, type=11, cpp_type=10, label=3,
|
|
249 |
+ has_default_value=False, default_value=[],
|
|
250 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
251 |
+ is_extension=False, extension_scope=None,
|
|
252 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
253 |
+ ],
|
|
254 |
+ extensions=[
|
|
255 |
+ ],
|
|
256 |
+ nested_types=[_LOGRECORD_METADATAENTRY, ],
|
|
257 |
+ enum_types=[
|
|
258 |
+ _LOGRECORD_LEVEL,
|
|
259 |
+ ],
|
|
260 |
+ serialized_options=None,
|
|
261 |
+ is_extendable=False,
|
|
262 |
+ syntax='proto3',
|
|
263 |
+ extension_ranges=[],
|
|
264 |
+ oneofs=[
|
|
265 |
+ ],
|
|
266 |
+ serialized_start=293,
|
|
267 |
+ serialized_end=625,
|
|
268 |
+)
|
|
269 |
+ |
|
270 |
+ |
|
271 |
+_METRICRECORD_METADATAENTRY = _descriptor.Descriptor(
|
|
272 |
+ name='MetadataEntry',
|
|
273 |
+ full_name='buildgrid.v2.MetricRecord.MetadataEntry',
|
|
274 |
+ filename=None,
|
|
275 |
+ file=DESCRIPTOR,
|
|
276 |
+ containing_type=None,
|
|
277 |
+ fields=[
|
|
278 |
+ _descriptor.FieldDescriptor(
|
|
279 |
+ name='key', full_name='buildgrid.v2.MetricRecord.MetadataEntry.key', index=0,
|
|
280 |
+ number=1, type=9, cpp_type=9, label=1,
|
|
281 |
+ has_default_value=False, default_value=_b("").decode('utf-8'),
|
|
282 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
283 |
+ is_extension=False, extension_scope=None,
|
|
284 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
285 |
+ _descriptor.FieldDescriptor(
|
|
286 |
+ name='value', full_name='buildgrid.v2.MetricRecord.MetadataEntry.value', index=1,
|
|
287 |
+ number=2, type=9, cpp_type=9, label=1,
|
|
288 |
+ has_default_value=False, default_value=_b("").decode('utf-8'),
|
|
289 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
290 |
+ is_extension=False, extension_scope=None,
|
|
291 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
292 |
+ ],
|
|
293 |
+ extensions=[
|
|
294 |
+ ],
|
|
295 |
+ nested_types=[],
|
|
296 |
+ enum_types=[
|
|
297 |
+ ],
|
|
298 |
+ serialized_options=_b('8\001'),
|
|
299 |
+ is_extendable=False,
|
|
300 |
+ syntax='proto3',
|
|
301 |
+ extension_ranges=[],
|
|
302 |
+ oneofs=[
|
|
303 |
+ ],
|
|
304 |
+ serialized_start=498,
|
|
305 |
+ serialized_end=545,
|
|
306 |
+)
|
|
307 |
+ |
|
308 |
+_METRICRECORD = _descriptor.Descriptor(
|
|
309 |
+ name='MetricRecord',
|
|
310 |
+ full_name='buildgrid.v2.MetricRecord',
|
|
311 |
+ filename=None,
|
|
312 |
+ file=DESCRIPTOR,
|
|
313 |
+ containing_type=None,
|
|
314 |
+ fields=[
|
|
315 |
+ _descriptor.FieldDescriptor(
|
|
316 |
+ name='creation_timestamp', full_name='buildgrid.v2.MetricRecord.creation_timestamp', index=0,
|
|
317 |
+ number=1, type=11, cpp_type=10, label=1,
|
|
318 |
+ has_default_value=False, default_value=None,
|
|
319 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
320 |
+ is_extension=False, extension_scope=None,
|
|
321 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
322 |
+ _descriptor.FieldDescriptor(
|
|
323 |
+ name='domain', full_name='buildgrid.v2.MetricRecord.domain', index=1,
|
|
324 |
+ number=2, type=14, cpp_type=8, label=1,
|
|
325 |
+ has_default_value=False, default_value=0,
|
|
326 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
327 |
+ is_extension=False, extension_scope=None,
|
|
328 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
329 |
+ _descriptor.FieldDescriptor(
|
|
330 |
+ name='type', full_name='buildgrid.v2.MetricRecord.type', index=2,
|
|
331 |
+ number=3, type=14, cpp_type=8, label=1,
|
|
332 |
+ has_default_value=False, default_value=0,
|
|
333 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
334 |
+ is_extension=False, extension_scope=None,
|
|
335 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
336 |
+ _descriptor.FieldDescriptor(
|
|
337 |
+ name='name', full_name='buildgrid.v2.MetricRecord.name', index=3,
|
|
338 |
+ number=4, type=9, cpp_type=9, label=1,
|
|
339 |
+ has_default_value=False, default_value=_b("").decode('utf-8'),
|
|
340 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
341 |
+ is_extension=False, extension_scope=None,
|
|
342 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
343 |
+ _descriptor.FieldDescriptor(
|
|
344 |
+ name='count', full_name='buildgrid.v2.MetricRecord.count', index=4,
|
|
345 |
+ number=5, type=5, cpp_type=1, label=1,
|
|
346 |
+ has_default_value=False, default_value=0,
|
|
347 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
348 |
+ is_extension=False, extension_scope=None,
|
|
349 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
350 |
+ _descriptor.FieldDescriptor(
|
|
351 |
+ name='duration', full_name='buildgrid.v2.MetricRecord.duration', index=5,
|
|
352 |
+ number=6, type=11, cpp_type=10, label=1,
|
|
353 |
+ has_default_value=False, default_value=None,
|
|
354 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
355 |
+ is_extension=False, extension_scope=None,
|
|
356 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
357 |
+ _descriptor.FieldDescriptor(
|
|
358 |
+ name='value', full_name='buildgrid.v2.MetricRecord.value', index=6,
|
|
359 |
+ number=7, type=5, cpp_type=1, label=1,
|
|
360 |
+ has_default_value=False, default_value=0,
|
|
361 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
362 |
+ is_extension=False, extension_scope=None,
|
|
363 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
364 |
+ _descriptor.FieldDescriptor(
|
|
365 |
+ name='metadata', full_name='buildgrid.v2.MetricRecord.metadata', index=7,
|
|
366 |
+ number=8, type=11, cpp_type=10, label=3,
|
|
367 |
+ has_default_value=False, default_value=[],
|
|
368 |
+ message_type=None, enum_type=None, containing_type=None,
|
|
369 |
+ is_extension=False, extension_scope=None,
|
|
370 |
+ serialized_options=None, file=DESCRIPTOR),
|
|
371 |
+ ],
|
|
372 |
+ extensions=[
|
|
373 |
+ ],
|
|
374 |
+ nested_types=[_METRICRECORD_METADATAENTRY, ],
|
|
375 |
+ enum_types=[
|
|
376 |
+ _METRICRECORD_DOMAIN,
|
|
377 |
+ _METRICRECORD_TYPE,
|
|
378 |
+ ],
|
|
379 |
+ serialized_options=None,
|
|
380 |
+ is_extendable=False,
|
|
381 |
+ syntax='proto3',
|
|
382 |
+ extension_ranges=[],
|
|
383 |
+ oneofs=[
|
|
384 |
+ _descriptor.OneofDescriptor(
|
|
385 |
+ name='data', full_name='buildgrid.v2.MetricRecord.data',
|
|
386 |
+ index=0, containing_type=None, fields=[]),
|
|
387 |
+ ],
|
|
388 |
+ serialized_start=628,
|
|
389 |
+ serialized_end=1106,
|
|
390 |
+)
|
|
391 |
+ |
|
392 |
+_BUSMESSAGE.fields_by_name['log_record'].message_type = _LOGRECORD
|
|
393 |
+_BUSMESSAGE.fields_by_name['metric_record'].message_type = _METRICRECORD
|
|
394 |
+_BUSMESSAGE.oneofs_by_name['record'].fields.append(
|
|
395 |
+ _BUSMESSAGE.fields_by_name['log_record'])
|
|
396 |
+_BUSMESSAGE.fields_by_name['log_record'].containing_oneof = _BUSMESSAGE.oneofs_by_name['record']
|
|
397 |
+_BUSMESSAGE.oneofs_by_name['record'].fields.append(
|
|
398 |
+ _BUSMESSAGE.fields_by_name['metric_record'])
|
|
399 |
+_BUSMESSAGE.fields_by_name['metric_record'].containing_oneof = _BUSMESSAGE.oneofs_by_name['record']
|
|
400 |
+_LOGRECORD_METADATAENTRY.containing_type = _LOGRECORD
|
|
401 |
+_LOGRECORD.fields_by_name['creation_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
|
|
402 |
+_LOGRECORD.fields_by_name['level'].enum_type = _LOGRECORD_LEVEL
|
|
403 |
+_LOGRECORD.fields_by_name['metadata'].message_type = _LOGRECORD_METADATAENTRY
|
|
404 |
+_LOGRECORD_LEVEL.containing_type = _LOGRECORD
|
|
405 |
+_METRICRECORD_METADATAENTRY.containing_type = _METRICRECORD
|
|
406 |
+_METRICRECORD.fields_by_name['creation_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
|
|
407 |
+_METRICRECORD.fields_by_name['domain'].enum_type = _METRICRECORD_DOMAIN
|
|
408 |
+_METRICRECORD.fields_by_name['type'].enum_type = _METRICRECORD_TYPE
|
|
409 |
+_METRICRECORD.fields_by_name['duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
|
|
410 |
+_METRICRECORD.fields_by_name['metadata'].message_type = _METRICRECORD_METADATAENTRY
|
|
411 |
+_METRICRECORD_DOMAIN.containing_type = _METRICRECORD
|
|
412 |
+_METRICRECORD_TYPE.containing_type = _METRICRECORD
|
|
413 |
+_METRICRECORD.oneofs_by_name['data'].fields.append(
|
|
414 |
+ _METRICRECORD.fields_by_name['count'])
|
|
415 |
+_METRICRECORD.fields_by_name['count'].containing_oneof = _METRICRECORD.oneofs_by_name['data']
|
|
416 |
+_METRICRECORD.oneofs_by_name['data'].fields.append(
|
|
417 |
+ _METRICRECORD.fields_by_name['duration'])
|
|
418 |
+_METRICRECORD.fields_by_name['duration'].containing_oneof = _METRICRECORD.oneofs_by_name['data']
|
|
419 |
+_METRICRECORD.oneofs_by_name['data'].fields.append(
|
|
420 |
+ _METRICRECORD.fields_by_name['value'])
|
|
421 |
+_METRICRECORD.fields_by_name['value'].containing_oneof = _METRICRECORD.oneofs_by_name['data']
|
|
422 |
+DESCRIPTOR.message_types_by_name['BusMessage'] = _BUSMESSAGE
|
|
423 |
+DESCRIPTOR.message_types_by_name['LogRecord'] = _LOGRECORD
|
|
424 |
+DESCRIPTOR.message_types_by_name['MetricRecord'] = _METRICRECORD
|
|
425 |
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
|
|
426 |
+ |
|
427 |
+BusMessage = _reflection.GeneratedProtocolMessageType('BusMessage', (_message.Message,), dict(
|
|
428 |
+ DESCRIPTOR = _BUSMESSAGE,
|
|
429 |
+ __module__ = 'buildgrid.v2.monitoring_pb2'
|
|
430 |
+ # @@protoc_insertion_point(class_scope:buildgrid.v2.BusMessage)
|
|
431 |
+ ))
|
|
432 |
+_sym_db.RegisterMessage(BusMessage)
|
|
433 |
+ |
|
434 |
+LogRecord = _reflection.GeneratedProtocolMessageType('LogRecord', (_message.Message,), dict(
|
|
435 |
+ |
|
436 |
+ MetadataEntry = _reflection.GeneratedProtocolMessageType('MetadataEntry', (_message.Message,), dict(
|
|
437 |
+ DESCRIPTOR = _LOGRECORD_METADATAENTRY,
|
|
438 |
+ __module__ = 'buildgrid.v2.monitoring_pb2'
|
|
439 |
+ # @@protoc_insertion_point(class_scope:buildgrid.v2.LogRecord.MetadataEntry)
|
|
440 |
+ ))
|
|
441 |
+ ,
|
|
442 |
+ DESCRIPTOR = _LOGRECORD,
|
|
443 |
+ __module__ = 'buildgrid.v2.monitoring_pb2'
|
|
444 |
+ # @@protoc_insertion_point(class_scope:buildgrid.v2.LogRecord)
|
|
445 |
+ ))
|
|
446 |
+_sym_db.RegisterMessage(LogRecord)
|
|
447 |
+_sym_db.RegisterMessage(LogRecord.MetadataEntry)
|
|
448 |
+ |
|
449 |
+MetricRecord = _reflection.GeneratedProtocolMessageType('MetricRecord', (_message.Message,), dict(
|
|
450 |
+ |
|
451 |
+ MetadataEntry = _reflection.GeneratedProtocolMessageType('MetadataEntry', (_message.Message,), dict(
|
|
452 |
+ DESCRIPTOR = _METRICRECORD_METADATAENTRY,
|
|
453 |
+ __module__ = 'buildgrid.v2.monitoring_pb2'
|
|
454 |
+ # @@protoc_insertion_point(class_scope:buildgrid.v2.MetricRecord.MetadataEntry)
|
|
455 |
+ ))
|
|
456 |
+ ,
|
|
457 |
+ DESCRIPTOR = _METRICRECORD,
|
|
458 |
+ __module__ = 'buildgrid.v2.monitoring_pb2'
|
|
459 |
+ # @@protoc_insertion_point(class_scope:buildgrid.v2.MetricRecord)
|
|
460 |
+ ))
|
|
461 |
+_sym_db.RegisterMessage(MetricRecord)
|
|
462 |
+_sym_db.RegisterMessage(MetricRecord.MetadataEntry)
|
|
463 |
+ |
|
464 |
+ |
|
465 |
+_LOGRECORD_METADATAENTRY._options = None
|
|
466 |
+_METRICRECORD_METADATAENTRY._options = None
|
|
467 |
+# @@protoc_insertion_point(module_scope)
|
1 |
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
|
|
2 |
+import grpc
|
|
3 |
+ |
1 |
+# Copyright (C) 2018 Bloomberg LP
|
|
2 |
+#
|
|
3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+# you may not use this file except in compliance with the License.
|
|
5 |
+# You may obtain a copy of the License at
|
|
6 |
+#
|
|
7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
+#
|
|
9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+# See the License for the specific language governing permissions and
|
|
13 |
+# limitations under the License.
|
|
14 |
+ |
|
15 |
+ |
|
16 |
+import logging
|
|
17 |
+import grpc
|
|
18 |
+ |
|
19 |
+from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
|
|
20 |
+ |
|
21 |
+ |
|
22 |
class CapabilitiesInterface:
    """Interface for calls to the Capabilities service."""

    def __init__(self, channel):
        """Initialises an instance of the capabilities service.

        Args:
            channel (grpc.Channel): A gRPC channel to the remote endpoint
                exposing the Capabilities service.
        """
        self.__logger = logging.getLogger(__name__)
        self.__stub = remote_execution_pb2_grpc.CapabilitiesStub(channel)

    def get_capabilities(self, instance_name):
        """Returns the capabilities of the server to the user.

        Args:
            instance_name (str): The name of the instance to query.

        Raises:
            grpc.RpcError: Re-raised after logging if the remote call fails.
        """

        request = remote_execution_pb2.GetCapabilitiesRequest(instance_name=instance_name)
        try:
            return self.__stub.GetCapabilities(request)

        except grpc.RpcError as e:
            self.__logger.error(e)
            raise
|
1 |
+# Copyright (C) 2018 Bloomberg LP
|
|
2 |
+#
|
|
3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+# you may not use this file except in compliance with the License.
|
|
5 |
+# You may obtain a copy of the License at
|
|
6 |
+#
|
|
7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
+#
|
|
9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+# See the License for the specific language governing permissions and
|
|
13 |
+# limitations under the License.
|
|
14 |
+ |
|
15 |
+ |
|
16 |
+import asyncio
|
|
17 |
+import ctypes
|
|
18 |
+from enum import Enum
|
|
19 |
+import sys
|
|
20 |
+ |
|
21 |
+from google.protobuf import json_format
|
|
22 |
+ |
|
23 |
+from buildgrid._protos.buildgrid.v2 import monitoring_pb2
|
|
24 |
+ |
|
25 |
+ |
|
26 |
class MonitoringOutputType(Enum):
    """Transport used to emit monitoring bus messages."""
    # Standard output stream.
    STDOUT = 'stdout'
    # On-disk file.
    FILE = 'file'
    # UNIX domain socket.
    SOCKET = 'socket'
|
|
33 |
+ |
|
34 |
+ |
|
35 |
class MonitoringOutputFormat(Enum):
    """Serialisation applied to monitoring bus messages."""
    # Protobuf binary format.
    BINARY = 'binary'
    # JSON format.
    JSON = 'json'
|
|
40 |
+ |
|
41 |
+ |
|
42 |
class MonitoringBus:
    """Streams monitoring records to stdout, a file or a UNIX socket.

    Records published onto the bus are wrapped into sequence-numbered
    ``BusMessage`` envelopes and serialised (protobuf binary or JSON) by a
    worker task running on the given event loop.
    """

    def __init__(self, event_loop,
                 endpoint_type=MonitoringOutputType.SOCKET, endpoint_location=None,
                 serialisation_format=MonitoringOutputFormat.BINARY):
        self.__event_loop = event_loop
        self.__streaming_task = None

        self.__message_queue = asyncio.Queue(loop=self.__event_loop)
        self.__sequence_number = 1

        self.__output_location = None
        self.__async_output = False
        self.__json_output = False

        if endpoint_type == MonitoringOutputType.FILE:
            self.__output_location = endpoint_location

        elif endpoint_type == MonitoringOutputType.SOCKET:
            self.__output_location = endpoint_location
            self.__async_output = True

        if serialisation_format == MonitoringOutputFormat.JSON:
            self.__json_output = True

    # --- Public API ---

    def start(self):
        """Starts the monitoring bus worker task."""
        if self.__streaming_task is not None:
            return

        self.__streaming_task = asyncio.ensure_future(
            self._streaming_worker(), loop=self.__event_loop)

    def stop(self):
        """Cancels the monitoring bus worker task."""
        if self.__streaming_task is None:
            return

        self.__streaming_task.cancel()

    async def send_record(self, record):
        """Publishes a record onto the bus asynchronously.

        Args:
            record (Message): The record to send; a
                :class:`monitoring_pb2.LogRecord` or
                :class:`monitoring_pb2.MetricRecord`.
        """
        await self.__message_queue.put(record)

    def send_record_nowait(self, record):
        """Publishes a record onto the bus without blocking.

        Args:
            record (Message): The record to send; a
                :class:`monitoring_pb2.LogRecord` or
                :class:`monitoring_pb2.MetricRecord`.
        """
        self.__message_queue.put_nowait(record)

    # --- Private API ---

    async def _streaming_worker(self):
        """Handles bus messages streaming work."""
        async def __streaming_worker(end_points):
            # Pops one record off the queue, wraps it in a BusMessage and
            # writes it to every given end-point. Returns True when a message
            # was written (so the caller can bump the sequence number).
            record = await self.__message_queue.get()

            message = monitoring_pb2.BusMessage()
            message.sequence_number = self.__sequence_number

            if record.DESCRIPTOR is monitoring_pb2.LogRecord.DESCRIPTOR:
                message.log_record.CopyFrom(record)

            elif record.DESCRIPTOR is monitoring_pb2.MetricRecord.DESCRIPTOR:
                message.metric_record.CopyFrom(record)

            else:
                return False

            if self.__json_output:
                blob_message = json_format.MessageToJson(message).encode()

                for end_point in end_points:
                    end_point.write(blob_message)

            else:
                # Length-prefix framing: a raw 4-byte message size followed by
                # the serialised payload.
                blob_size = ctypes.c_uint32(message.ByteSize())
                blob_message = message.SerializeToString()

                for end_point in end_points:
                    end_point.write(bytes(blob_size))
                    end_point.write(blob_message)

            return True

        output_writers, output_file = [], None

        async def __client_connected_callback(reader, writer):
            # Keep a handle on every connected client so that records get
            # fanned-out to all of them.
            output_writers.append(writer)

        try:
            if self.__async_output and self.__output_location:
                await asyncio.start_unix_server(
                    __client_connected_callback, path=self.__output_location,
                    loop=self.__event_loop)

                while True:
                    if await __streaming_worker(output_writers):
                        self.__sequence_number += 1

                        for writer in output_writers:
                            await writer.drain()

            elif self.__output_location:
                output_file = open(self.__output_location, mode='wb')

                output_writers.append(output_file)

                while True:
                    # Fixed: stream to the registered writers. The previous
                    # `iter(output_file)` iterated a write-only file object,
                    # which raises on the first record.
                    if await __streaming_worker(output_writers):
                        self.__sequence_number += 1

            else:
                output_writers.append(sys.stdout.buffer)

                while True:
                    if await __streaming_worker(output_writers):
                        self.__sequence_number += 1

        except asyncio.CancelledError:
            if output_file is not None:
                output_file.close()

            elif output_writers:
                for writer in output_writers:
                    writer.close()
                    await writer.wait_closed()
|
1 |
+# Copyright (C) 2018 Bloomberg LP
|
|
2 |
+#
|
|
3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+# you may not use this file except in compliance with the License.
|
|
5 |
+# You may obtain a copy of the License at
|
|
6 |
+#
|
|
7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
+#
|
|
9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+# See the License for the specific language governing permissions and
|
|
13 |
+# limitations under the License.
|
|
14 |
+ |
|
15 |
+ |
|
16 |
+import logging
|
|
17 |
+ |
|
18 |
+from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
|
19 |
+ |
|
20 |
+ |
|
21 |
class CapabilitiesInstance:
    """Aggregates the capabilities a BuildGrid server advertises.

    Holds optional references to the CAS, action-cache and execution
    instances of a server instance, and assembles the corresponding
    REAPI ``ServerCapabilities`` message on demand.
    """

    def __init__(self, cas_instance=None, action_cache_instance=None, execution_instance=None):
        self.__logger = logging.getLogger(__name__)
        self.__cas_instance = cas_instance
        self.__action_cache_instance = action_cache_instance
        self.__execution_instance = execution_instance

    def register_instance_with_server(self, instance_name, server):
        """Registers this instance with the given server under a name."""
        server.add_capabilities_instance(self, instance_name)

    def add_cas_instance(self, cas_instance):
        """Attaches a CAS instance to report cache capabilities for."""
        self.__cas_instance = cas_instance

    def add_action_cache_instance(self, action_cache_instance):
        """Attaches an action-cache instance to report update capabilities for."""
        self.__action_cache_instance = action_cache_instance

    def add_execution_instance(self, execution_instance):
        """Attaches an execution instance to report execution capabilities for."""
        self.__execution_instance = execution_instance

    def get_capabilities(self):
        """Builds and returns the full ``ServerCapabilities`` message."""
        capabilities = remote_execution_pb2.ServerCapabilities()
        capabilities.cache_capabilities.CopyFrom(self._get_cache_capabilities())
        capabilities.execution_capabilities.CopyFrom(self._get_capabilities_execution())
        return capabilities

    def _get_cache_capabilities(self):
        """Assembles ``CacheCapabilities`` from the attached CAS and action-cache."""
        cache_capabilities = remote_execution_pb2.CacheCapabilities()
        update_capabilities = remote_execution_pb2.ActionCacheUpdateCapabilities()

        cas = self.__cas_instance
        if cas:
            cache_capabilities.digest_function.extend([cas.hash_type()])
            cache_capabilities.max_batch_total_size_bytes = cas.max_batch_total_size_bytes()
            cache_capabilities.symlink_absolute_path_strategy = cas.symlink_absolute_path_strategy()
            # TODO: execution priority #102
            # capabilities.cache_priority_capabilities =

        if self.__action_cache_instance:
            update_capabilities.update_enabled = self.__action_cache_instance.allow_updates

        cache_capabilities.action_cache_update_capabilities.CopyFrom(update_capabilities)
        return cache_capabilities

    def _get_capabilities_execution(self):
        """Assembles ``ExecutionCapabilities`` from the attached execution instance."""
        execution_capabilities = remote_execution_pb2.ExecutionCapabilities()

        if not self.__execution_instance:
            execution_capabilities.exec_enabled = False
            return execution_capabilities

        execution_capabilities.exec_enabled = True
        execution_capabilities.digest_function = self.__execution_instance.hash_type()
        # TODO: execution priority #102
        # capabilities.execution_priority =

        return execution_capabilities
|
1 |
+# Copyright (C) 2018 Bloomberg LP
|
|
2 |
+#
|
|
3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+# you may not use this file except in compliance with the License.
|
|
5 |
+# You may obtain a copy of the License at
|
|
6 |
+#
|
|
7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
+#
|
|
9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+# See the License for the specific language governing permissions and
|
|
13 |
+# limitations under the License.
|
|
14 |
+ |
|
15 |
+ |
|
16 |
import logging

import grpc

from buildgrid._exceptions import InvalidArgumentError
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2_grpc
|
|
22 |
+ |
|
23 |
+ |
|
24 |
class CapabilitiesService(remote_execution_pb2_grpc.CapabilitiesServicer):
    """Exposes the REAPI Capabilities service over gRPC."""

    def __init__(self, server):
        """Registers the servicer against the given gRPC server.

        Args:
            server (grpc.Server): The server to attach this service to.
        """
        self.__logger = logging.getLogger(__name__)
        self.__instances = {}
        remote_execution_pb2_grpc.add_CapabilitiesServicer_to_server(self, server)

    def add_instance(self, name, instance):
        """Maps an instance name to a capabilities instance."""
        self.__instances[name] = instance

    def add_cas_instance(self, name, instance):
        """Attaches a CAS instance to the named capabilities instance."""
        self.__instances[name].add_cas_instance(instance)

    def add_action_cache_instance(self, name, instance):
        """Attaches an action-cache instance to the named capabilities instance."""
        self.__instances[name].add_action_cache_instance(instance)

    def add_execution_instance(self, name, instance):
        """Attaches an execution instance to the named capabilities instance."""
        self.__instances[name].add_execution_instance(instance)

    def GetCapabilities(self, request, context):
        """Returns the server's capabilities for the requested instance.

        On an unknown instance name, sets INVALID_ARGUMENT on the context
        and returns an empty ``ServerCapabilities`` message.
        """
        try:
            instance = self._get_instance(request.instance_name)
            return instance.get_capabilities()

        except InvalidArgumentError as e:
            self.__logger.error(e)
            context.set_details(str(e))
            context.set_code(grpc.StatusCode.INVALID_ARGUMENT)

        # Fixed: ServerCapabilities is a message in remote_execution_pb2; the
        # generated gRPC stub module has no such attribute, so the previous
        # `remote_execution_pb2_grpc.ServerCapabilities()` raised
        # AttributeError on every invalid-instance request.
        return remote_execution_pb2.ServerCapabilities()

    def _get_instance(self, name):
        """Looks up a registered instance, raising InvalidArgumentError if absent."""
        try:
            return self.__instances[name]

        except KeyError:
            raise InvalidArgumentError("Instance doesn't exist on server: [{}]".format(name))
|
... | ... | @@ -24,7 +24,8 @@ import logging |
24 | 24 |
from buildgrid._exceptions import InvalidArgumentError, NotFoundError, OutOfRangeError
|
25 | 25 |
from buildgrid._protos.google.bytestream import bytestream_pb2
|
26 | 26 |
from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2 as re_pb2
|
27 |
-from buildgrid.settings import HASH
|
|
27 |
+from buildgrid.settings import HASH, HASH_LENGTH
|
|
28 |
+from buildgrid.utils import get_hash_type
|
|
28 | 29 |
|
29 | 30 |
|
30 | 31 |
class ContentAddressableStorageInstance:
|
... | ... | @@ -37,6 +38,19 @@ class ContentAddressableStorageInstance: |
37 | 38 |
def register_instance_with_server(self, instance_name, server):
|
38 | 39 |
server.add_cas_instance(self, instance_name)
|
39 | 40 |
|
41 |
+ def hash_type(self):
|
|
42 |
+ return get_hash_type()
|
|
43 |
+ |
|
44 |
+ def max_batch_total_size_bytes(self):
|
|
45 |
+ # TODO: link with max size
|
|
46 |
+ # Should be added from settings in MR !119
|
|
47 |
+ return 2000000
|
|
48 |
+ |
|
49 |
+ def symlink_absolute_path_strategy(self):
|
|
50 |
+ # Currently this strategy is hardcoded into BuildGrid
|
|
51 |
+ # With no setting to reference
|
|
52 |
+ return re_pb2.CacheCapabilities().DISALLOWED
|
|
53 |
+ |
|
40 | 54 |
def find_missing_blobs(self, blob_digests):
|
41 | 55 |
storage = self._storage
|
42 | 56 |
return re_pb2.FindMissingBlobsResponse(
|
... | ... | @@ -71,15 +85,12 @@ class ByteStreamInstance: |
71 | 85 |
def register_instance_with_server(self, instance_name, server):
|
72 | 86 |
server.add_bytestream_instance(self, instance_name)
|
73 | 87 |
|
74 |
- def read(self, path, read_offset, read_limit):
|
|
75 |
- storage = self._storage
|
|
88 |
+ def read(self, digest_hash, digest_size, read_offset, read_limit):
|
|
89 |
+ if len(digest_hash) != HASH_LENGTH or not digest_size.isdigit():
|
|
90 |
+ raise InvalidArgumentError("Invalid digest [{}/{}]"
|
|
91 |
+ .format(digest_hash, digest_size))
|
|
76 | 92 |
|
77 |
- if path[0] == "blobs":
|
|
78 |
- path = [""] + path
|
|
79 |
- |
|
80 |
- # Parse/verify resource name.
|
|
81 |
- # Read resource names look like "[instance/]blobs/abc123hash/99".
|
|
82 |
- digest = re_pb2.Digest(hash=path[2], size_bytes=int(path[3]))
|
|
93 |
+ digest = re_pb2.Digest(hash=digest_hash, size_bytes=int(digest_size))
|
|
83 | 94 |
|
84 | 95 |
# Check the given read offset and limit.
|
85 | 96 |
if read_offset < 0 or read_offset > digest.size_bytes:
|
... | ... | @@ -95,7 +106,7 @@ class ByteStreamInstance: |
95 | 106 |
raise InvalidArgumentError("Negative read_limit is invalid")
|
96 | 107 |
|
97 | 108 |
# Read the blob from storage and send its contents to the client.
|
98 |
- result = storage.get_blob(digest)
|
|
109 |
+ result = self._storage.get_blob(digest)
|
|
99 | 110 |
if result is None:
|
100 | 111 |
raise NotFoundError("Blob not found")
|
101 | 112 |
|
... | ... | @@ -110,51 +121,35 @@ class ByteStreamInstance: |
110 | 121 |
data=result.read(min(self.BLOCK_SIZE, bytes_remaining)))
|
111 | 122 |
bytes_remaining -= self.BLOCK_SIZE
|
112 | 123 |
|
113 |
- def write(self, requests):
|
|
114 |
- storage = self._storage
|
|
115 |
- |
|
116 |
- first_request = next(requests)
|
|
117 |
- path = first_request.resource_name.split("/")
|
|
124 |
+ def write(self, digest_hash, digest_size, first_block, other_blocks):
|
|
125 |
+ if len(digest_hash) != HASH_LENGTH or not digest_size.isdigit():
|
|
126 |
+ raise InvalidArgumentError("Invalid digest [{}/{}]"
|
|
127 |
+ .format(digest_hash, digest_size))
|
|
118 | 128 |
|
119 |
- if path[0] == "uploads":
|
|
120 |
- path = [""] + path
|
|
129 |
+ digest = re_pb2.Digest(hash=digest_hash, size_bytes=int(digest_size))
|
|
121 | 130 |
|
122 |
- digest = re_pb2.Digest(hash=path[4], size_bytes=int(path[5]))
|
|
123 |
- write_session = storage.begin_write(digest)
|
|
131 |
+ write_session = self._storage.begin_write(digest)
|
|
124 | 132 |
|
125 | 133 |
# Start the write session and write the first request's data.
|
126 |
- write_session.write(first_request.data)
|
|
127 |
- hash_ = HASH(first_request.data)
|
|
128 |
- bytes_written = len(first_request.data)
|
|
129 |
- finished = first_request.finish_write
|
|
134 |
+ write_session.write(first_block)
|
|
130 | 135 |
|
131 |
- # Handle subsequent write requests.
|
|
132 |
- while not finished:
|
|
133 |
- |
|
134 |
- for request in requests:
|
|
135 |
- if finished:
|
|
136 |
- raise InvalidArgumentError("Write request sent after write finished")
|
|
137 |
- |
|
138 |
- elif request.write_offset != bytes_written:
|
|
139 |
- raise InvalidArgumentError("Invalid write offset")
|
|
136 |
+ computed_hash = HASH(first_block)
|
|
137 |
+ bytes_written = len(first_block)
|
|
140 | 138 |
|
141 |
- elif request.resource_name and request.resource_name != first_request.resource_name:
|
|
142 |
- raise InvalidArgumentError("Resource name changed mid-write")
|
|
143 |
- |
|
144 |
- finished = request.finish_write
|
|
145 |
- bytes_written += len(request.data)
|
|
146 |
- if bytes_written > digest.size_bytes:
|
|
147 |
- raise InvalidArgumentError("Wrote too much data to blob")
|
|
139 |
+ # Handle subsequent write requests.
|
|
140 |
+ for next_block in other_blocks:
|
|
141 |
+ write_session.write(next_block)
|
|
148 | 142 |
|
149 |
- write_session.write(request.data)
|
|
150 |
- hash_.update(request.data)
|
|
143 |
+ computed_hash.update(next_block)
|
|
144 |
+ bytes_written += len(next_block)
|
|
151 | 145 |
|
152 | 146 |
# Check that the data matches the provided digest.
|
153 |
- if bytes_written != digest.size_bytes or not finished:
|
|
147 |
+ if bytes_written != digest.size_bytes:
|
|
154 | 148 |
raise NotImplementedError("Cannot close stream before finishing write")
|
155 | 149 |
|
156 |
- elif hash_.hexdigest() != digest.hash:
|
|
150 |
+ elif computed_hash.hexdigest() != digest.hash:
|
|
157 | 151 |
raise InvalidArgumentError("Data does not match hash")
|
158 | 152 |
|
159 |
- storage.commit_write(digest, write_session)
|
|
153 |
+ self._storage.commit_write(digest, write_session)
|
|
154 |
+ |
|
160 | 155 |
return bytestream_pb2.WriteResponse(committed_size=bytes_written)
|
... | ... | @@ -21,7 +21,6 @@ Implements the Content Addressable Storage API and ByteStream API. |
21 | 21 |
"""
|
22 | 22 |
|
23 | 23 |
|
24 |
-from itertools import tee
|
|
25 | 24 |
import logging
|
26 | 25 |
|
27 | 26 |
import grpc
|
... | ... | @@ -115,27 +114,30 @@ class ByteStreamService(bytestream_pb2_grpc.ByteStreamServicer): |
115 | 114 |
def Read(self, request, context):
|
116 | 115 |
self.__logger.debug("Read request from [%s]", context.peer())
|
117 | 116 |
|
117 |
+ names = request.resource_name.split('/')
|
|
118 |
+ |
|
118 | 119 |
try:
|
119 |
- path = request.resource_name.split("/")
|
|
120 |
- instance_name = path[0]
|
|
120 |
+ instance_name = ''
|
|
121 |
+ # Format: "{instance_name}/blobs/{hash}/{size}":
|
|
122 |
+ if len(names) < 3 or names[-3] != 'blobs':
|
|
123 |
+ raise InvalidArgumentError("Invalid resource name: [{}]"
|
|
124 |
+ .format(request.resource_name))
|
|
121 | 125 |
|
122 |
- # TODO: Decide on default instance name
|
|
123 |
- if path[0] == "blobs":
|
|
124 |
- if len(path) < 3 or not path[2].isdigit():
|
|
125 |
- raise InvalidArgumentError("Invalid resource name: [{}]".format(request.resource_name))
|
|
126 |
- instance_name = ""
|
|
126 |
+ elif names[0] != 'blobs':
|
|
127 |
+ index = names.index('blobs')
|
|
128 |
+ instance_name = '/'.join(names[:index])
|
|
129 |
+ names = names[index:]
|
|
127 | 130 |
|
128 |
- elif path[1] == "blobs":
|
|
129 |
- if len(path) < 4 or not path[3].isdigit():
|
|
130 |
- raise InvalidArgumentError("Invalid resource name: [{}]".format(request.resource_name))
|
|
131 |
+ if len(names) < 3:
|
|
132 |
+ raise InvalidArgumentError("Invalid resource name: [{}]"
|
|
133 |
+ .format(request.resource_name))
|
|
131 | 134 |
|
132 |
- else:
|
|
133 |
- raise InvalidArgumentError("Invalid resource name: [{}]".format(request.resource_name))
|
|
135 |
+ hash_, size_bytes = names[1], names[2]
|
|
134 | 136 |
|
135 | 137 |
instance = self._get_instance(instance_name)
|
136 |
- yield from instance.read(path,
|
|
137 |
- request.read_offset,
|
|
138 |
- request.read_limit)
|
|
138 |
+ |
|
139 |
+ yield from instance.read(hash_, size_bytes,
|
|
140 |
+ request.read_offset, request.read_limit)
|
|
139 | 141 |
|
140 | 142 |
except InvalidArgumentError as e:
|
141 | 143 |
self.__logger.error(e)
|
... | ... | @@ -158,31 +160,31 @@ class ByteStreamService(bytestream_pb2_grpc.ByteStreamServicer): |
158 | 160 |
def Write(self, requests, context):
|
159 | 161 |
self.__logger.debug("Write request from [%s]", context.peer())
|
160 | 162 |
|
161 |
- try:
|
|
162 |
- requests, request_probe = tee(requests, 2)
|
|
163 |
- first_request = next(request_probe)
|
|
164 |
- |
|
165 |
- path = first_request.resource_name.split("/")
|
|
163 |
+ request = next(requests)
|
|
164 |
+ names = request.resource_name.split('/')
|
|
166 | 165 |
|
167 |
- instance_name = path[0]
|
|
166 |
+ try:
|
|
167 |
+ instance_name = ''
|
|
168 |
+ # Format: "{instance_name}/uploads/{uuid}/blobs/{hash}/{size}/{anything}":
|
|
169 |
+ if len(names) < 5 or 'uploads' not in names or 'blobs' not in names:
|
|
170 |
+ raise InvalidArgumentError("Invalid resource name: [{}]"
|
|
171 |
+ .format(request.resource_name))
|
|
168 | 172 |
|
169 |
- # TODO: Sort out no instance name
|
|
170 |
- if path[0] == "uploads":
|
|
171 |
- if len(path) < 5 or path[2] != "blobs" or not path[4].isdigit():
|
|
172 |
- raise InvalidArgumentError("Invalid resource name: [{}]".format(first_request.resource_name))
|
|
173 |
- instance_name = ""
|
|
173 |
+ elif names[0] != 'uploads':
|
|
174 |
+ index = names.index('uploads')
|
|
175 |
+ instance_name = '/'.join(names[:index])
|
|
176 |
+ names = names[index:]
|
|
174 | 177 |
|
175 |
- elif path[1] == "uploads":
|
|
176 |
- if len(path) < 6 or path[3] != "blobs" or not path[5].isdigit():
|
|
177 |
- raise InvalidArgumentError("Invalid resource name: [{}]".format(first_request.resource_name))
|
|
178 |
+ if len(names) < 5:
|
|
179 |
+ raise InvalidArgumentError("Invalid resource name: [{}]"
|
|
180 |
+ .format(request.resource_name))
|
|
178 | 181 |
|
179 |
- else:
|
|
180 |
- raise InvalidArgumentError("Invalid resource name: [{}]".format(first_request.resource_name))
|
|
182 |
+ _, hash_, size_bytes = names[1], names[3], names[4]
|
|
181 | 183 |
|
182 | 184 |
instance = self._get_instance(instance_name)
|
183 |
- response = instance.write(requests)
|
|
184 | 185 |
|
185 |
- return response
|
|
186 |
+ return instance.write(hash_, size_bytes, request.data,
|
|
187 |
+ [request.data for request in requests])
|
|
186 | 188 |
|
187 | 189 |
except NotImplementedError as e:
|
188 | 190 |
self.__logger.error(e)
|
... | ... | @@ -34,7 +34,7 @@ class StorageABC(abc.ABC): |
34 | 34 |
@abc.abstractmethod
|
35 | 35 |
def has_blob(self, digest):
|
36 | 36 |
"""Return True if the blob with the given instance/digest exists."""
|
37 |
- pass
|
|
37 |
+ raise NotImplementedError()
|
|
38 | 38 |
|
39 | 39 |
@abc.abstractmethod
|
40 | 40 |
def get_blob(self, digest):
|
... | ... | @@ -42,14 +42,14 @@ class StorageABC(abc.ABC): |
42 | 42 |
|
43 | 43 |
If the blob isn't present in storage, return None.
|
44 | 44 |
"""
|
45 |
- pass
|
|
45 |
+ raise NotImplementedError()
|
|
46 | 46 |
|
47 | 47 |
@abc.abstractmethod
|
48 | 48 |
def begin_write(self, digest):
|
49 | 49 |
"""Return a file-like object to which a blob's contents could be
|
50 | 50 |
written.
|
51 | 51 |
"""
|
52 |
- pass
|
|
52 |
+ raise NotImplementedError()
|
|
53 | 53 |
|
54 | 54 |
@abc.abstractmethod
|
55 | 55 |
def commit_write(self, digest, write_session):
|
... | ... | @@ -60,7 +60,7 @@ class StorageABC(abc.ABC): |
60 | 60 |
written to the write_session actually matches the digest. The caller
|
61 | 61 |
must do that.
|
62 | 62 |
"""
|
63 |
- pass
|
|
63 |
+ raise NotImplementedError()
|
|
64 | 64 |
|
65 | 65 |
def missing_blobs(self, digests):
|
66 | 66 |
"""Return a container containing the blobs not present in CAS."""
|
... | ... | @@ -25,6 +25,7 @@ from buildgrid._exceptions import FailedPreconditionError, InvalidArgumentError |
25 | 25 |
from buildgrid._protos.build.bazel.remote.execution.v2.remote_execution_pb2 import Action
|
26 | 26 |
|
27 | 27 |
from ..job import Job
|
28 |
+from ...utils import get_hash_type
|
|
28 | 29 |
|
29 | 30 |
|
30 | 31 |
class ExecutionInstance:
|
... | ... | @@ -38,6 +39,9 @@ class ExecutionInstance: |
38 | 39 |
def register_instance_with_server(self, instance_name, server):
|
39 | 40 |
server.add_execution_instance(self, instance_name)
|
40 | 41 |
|
42 |
+ def hash_type(self):
|
|
43 |
+ return get_hash_type()
|
|
44 |
+ |
|
41 | 45 |
def execute(self, action_digest, skip_cache_lookup, message_queue=None):
|
42 | 46 |
""" Sends a job for execution.
|
43 | 47 |
Queues an action and creates an Operation instance to be associated with
|
... | ... | @@ -13,18 +13,24 @@ |
13 | 13 |
# limitations under the License.
|
14 | 14 |
|
15 | 15 |
|
16 |
+import asyncio
|
|
16 | 17 |
from concurrent import futures
|
17 | 18 |
import logging
|
18 | 19 |
import os
|
20 |
+import signal
|
|
19 | 21 |
|
20 | 22 |
import grpc
|
21 | 23 |
|
22 | 24 |
from .cas.service import ByteStreamService, ContentAddressableStorageService
|
23 | 25 |
from .actioncache.service import ActionCacheService
|
26 |
+from .capabilities.service import CapabilitiesService
|
|
24 | 27 |
from .execution.service import ExecutionService
|
25 | 28 |
from .operations.service import OperationsService
|
26 | 29 |
from .bots.service import BotsService
|
27 | 30 |
from .referencestorage.service import ReferenceStorageService
|
31 |
+from ._monitoring import MonitoringBus, MonitoringOutputType, MonitoringOutputFormat
|
|
32 |
+ |
|
33 |
+from .capabilities.instance import CapabilitiesInstance
|
|
28 | 34 |
|
29 | 35 |
|
30 | 36 |
class BuildGridServer:
|
... | ... | @@ -34,7 +40,7 @@ class BuildGridServer: |
34 | 40 |
requisite services.
|
35 | 41 |
"""
|
36 | 42 |
|
37 |
- def __init__(self, max_workers=None):
|
|
43 |
+ def __init__(self, max_workers=None, monitor=False):
|
|
38 | 44 |
"""Initializes a new :class:`BuildGridServer` instance.
|
39 | 45 |
|
40 | 46 |
Args:
|
... | ... | @@ -46,9 +52,14 @@ class BuildGridServer: |
46 | 52 |
# Use max_workers default from Python 3.5+
|
47 | 53 |
max_workers = (os.cpu_count() or 1) * 5
|
48 | 54 |
|
49 |
- server = grpc.server(futures.ThreadPoolExecutor(max_workers))
|
|
55 |
+ self.__grpc_executor = futures.ThreadPoolExecutor(max_workers)
|
|
56 |
+ self.__grpc_server = grpc.server(self.__grpc_executor)
|
|
57 |
+ |
|
58 |
+ self.__main_loop = asyncio.get_event_loop()
|
|
59 |
+ self.__monitoring_bus = None
|
|
50 | 60 |
|
51 |
- self._server = server
|
|
61 |
+ # We always want a capabilities service
|
|
62 |
+ self._capabilities_service = CapabilitiesService(self.__grpc_server)
|
|
52 | 63 |
|
53 | 64 |
self._execution_service = None
|
54 | 65 |
self._bots_service = None
|
... | ... | @@ -58,15 +69,34 @@ class BuildGridServer: |
58 | 69 |
self._cas_service = None
|
59 | 70 |
self._bytestream_service = None
|
60 | 71 |
|
72 |
+ self._is_instrumented = monitor
|
|
73 |
+ |
|
74 |
+ if self._is_instrumented:
|
|
75 |
+ self.__monitoring_bus = MonitoringBus(
|
|
76 |
+ self.__main_loop, endpoint_type=MonitoringOutputType.STDOUT,
|
|
77 |
+ serialisation_format=MonitoringOutputFormat.JSON)
|
|
78 |
+ |
|
79 |
+ # --- Public API ---
|
|
80 |
+ |
|
61 | 81 |
def start(self):
|
62 |
- """Starts the server.
|
|
63 |
- """
|
|
64 |
- self._server.start()
|
|
82 |
+ """Starts the BuildGrid server."""
|
|
83 |
+ self.__grpc_server.start()
|
|
65 | 84 |
|
66 |
- def stop(self, grace=0):
|
|
67 |
- """Stops the server.
|
|
68 |
- """
|
|
69 |
- self._server.stop(grace)
|
|
85 |
+ if self._is_instrumented:
|
|
86 |
+ self.__monitoring_bus.start()
|
|
87 |
+ |
|
88 |
+ self.__main_loop.add_signal_handler(signal.SIGTERM, self.stop)
|
|
89 |
+ |
|
90 |
+ self.__main_loop.run_forever()
|
|
91 |
+ |
|
92 |
+ def stop(self):
|
|
93 |
+ """Stops the BuildGrid server."""
|
|
94 |
+ if self._is_instrumented:
|
|
95 |
+ self.__monitoring_bus.stop()
|
|
96 |
+ |
|
97 |
+ self.__main_loop.stop()
|
|
98 |
+ |
|
99 |
+ self.__grpc_server.stop(None)
|
|
70 | 100 |
|
71 | 101 |
def add_port(self, address, credentials):
|
72 | 102 |
"""Adds a port to the server.
|
... | ... | @@ -77,14 +107,19 @@ class BuildGridServer: |
77 | 107 |
Args:
|
78 | 108 |
address (str): The address with port number.
|
79 | 109 |
credentials (:obj:`grpc.ChannelCredentials`): Credentials object.
|
110 |
+ |
|
111 |
+ Returns:
|
|
112 |
+ int: Number of the bound port.
|
|
80 | 113 |
"""
|
81 | 114 |
if credentials is not None:
|
82 | 115 |
self.__logger.info("Adding secure connection on: [%s]", address)
|
83 |
- self._server.add_secure_port(address, credentials)
|
|
116 |
+ port_number = self.__grpc_server.add_secure_port(address, credentials)
|
|
84 | 117 |
|
85 | 118 |
else:
|
86 | 119 |
self.__logger.info("Adding insecure connection on [%s]", address)
|
87 |
- self._server.add_insecure_port(address)
|
|
120 |
+ port_number = self.__grpc_server.add_insecure_port(address)
|
|
121 |
+ |
|
122 |
+ return port_number
|
|
88 | 123 |
|
89 | 124 |
def add_execution_instance(self, instance, instance_name):
|
90 | 125 |
"""Adds an :obj:`ExecutionInstance` to the service.
|
... | ... | @@ -96,9 +131,10 @@ class BuildGridServer: |
96 | 131 |
instance_name (str): Instance name.
|
97 | 132 |
"""
|
98 | 133 |
if self._execution_service is None:
|
99 |
- self._execution_service = ExecutionService(self._server)
|
|
134 |
+ self._execution_service = ExecutionService(self.__grpc_server)
|
|
100 | 135 |
|
101 | 136 |
self._execution_service.add_instance(instance_name, instance)
|
137 |
+ self._add_capabilities_instance(instance_name, execution_instance=instance)
|
|
102 | 138 |
|
103 | 139 |
def add_bots_interface(self, instance, instance_name):
|
104 | 140 |
"""Adds a :obj:`BotsInterface` to the service.
|
... | ... | @@ -110,7 +146,7 @@ class BuildGridServer: |
110 | 146 |
instance_name (str): Instance name.
|
111 | 147 |
"""
|
112 | 148 |
if self._bots_service is None:
|
113 |
- self._bots_service = BotsService(self._server)
|
|
149 |
+ self._bots_service = BotsService(self.__grpc_server)
|
|
114 | 150 |
|
115 | 151 |
self._bots_service.add_instance(instance_name, instance)
|
116 | 152 |
|
... | ... | @@ -124,7 +160,7 @@ class BuildGridServer: |
124 | 160 |
instance_name (str): Instance name.
|
125 | 161 |
"""
|
126 | 162 |
if self._operations_service is None:
|
127 |
- self._operations_service = OperationsService(self._server)
|
|
163 |
+ self._operations_service = OperationsService(self.__grpc_server)
|
|
128 | 164 |
|
129 | 165 |
self._operations_service.add_instance(instance_name, instance)
|
130 | 166 |
|
... | ... | @@ -138,7 +174,7 @@ class BuildGridServer: |
138 | 174 |
instance_name (str): Instance name.
|
139 | 175 |
"""
|
140 | 176 |
if self._reference_storage_service is None:
|
141 |
- self._reference_storage_service = ReferenceStorageService(self._server)
|
|
177 |
+ self._reference_storage_service = ReferenceStorageService(self.__grpc_server)
|
|
142 | 178 |
|
143 | 179 |
self._reference_storage_service.add_instance(instance_name, instance)
|
144 | 180 |
|
... | ... | @@ -152,12 +188,13 @@ class BuildGridServer: |
152 | 188 |
instance_name (str): Instance name.
|
153 | 189 |
"""
|
154 | 190 |
if self._action_cache_service is None:
|
155 |
- self._action_cache_service = ActionCacheService(self._server)
|
|
191 |
+ self._action_cache_service = ActionCacheService(self.__grpc_server)
|
|
156 | 192 |
|
157 | 193 |
self._action_cache_service.add_instance(instance_name, instance)
|
194 |
+ self._add_capabilities_instance(instance_name, action_cache_instance=instance)
|
|
158 | 195 |
|
159 | 196 |
def add_cas_instance(self, instance, instance_name):
|
160 |
- """Stores a :obj:`ContentAddressableStorageInstance` to the service.
|
|
197 |
+ """Adds a :obj:`ContentAddressableStorageInstance` to the service.
|
|
161 | 198 |
|
162 | 199 |
If no service exists, it creates one.
|
163 | 200 |
|
... | ... | @@ -166,12 +203,12 @@ class BuildGridServer: |
166 | 203 |
instance_name (str): Instance name.
|
167 | 204 |
"""
|
168 | 205 |
if self._cas_service is None:
|
169 |
- self._cas_service = ContentAddressableStorageService(self._server)
|
|
206 |
+ self._cas_service = ContentAddressableStorageService(self.__grpc_server)
|
|
170 | 207 |
|
171 |
- self._cas_service.add_instance(instance_name, instance)
|
|
208 |
+ self._add_capabilities_instance(instance_name, cas_instance=instance)
|
|
172 | 209 |
|
173 | 210 |
def add_bytestream_instance(self, instance, instance_name):
|
174 |
- """Stores a :obj:`ByteStreamInstance` to the service.
|
|
211 |
+ """Adds a :obj:`ByteStreamInstance` to the service.
|
|
175 | 212 |
|
176 | 213 |
If no service exists, it creates one.
|
177 | 214 |
|
... | ... | @@ -180,6 +217,37 @@ class BuildGridServer: |
180 | 217 |
instance_name (str): Instance name.
|
181 | 218 |
"""
|
182 | 219 |
if self._bytestream_service is None:
|
183 |
- self._bytestream_service = ByteStreamService(self._server)
|
|
220 |
+ self._bytestream_service = ByteStreamService(self.__grpc_server)
|
|
184 | 221 |
|
185 | 222 |
self._bytestream_service.add_instance(instance_name, instance)
|
223 |
+ |
|
224 |
+ def _add_capabilities_instance(self, instance_name,
|
|
225 |
+ cas_instance=None,
|
|
226 |
+ action_cache_instance=None,
|
|
227 |
+ execution_instance=None):
|
|
228 |
+ """Adds a :obj:`CapabilitiesInstance` to the service.
|
|
229 |
+ |
|
230 |
+ Args:
|
|
231 |
+ instance (:obj:`CapabilitiesInstance`): Instance to add.
|
|
232 |
+ instance_name (str): Instance name.
|
|
233 |
+ """
|
|
234 |
+ |
|
235 |
+ try:
|
|
236 |
+ if cas_instance:
|
|
237 |
+ self._capabilities_service.add_cas_instance(instance_name, cas_instance)
|
|
238 |
+ if action_cache_instance:
|
|
239 |
+ self._capabilities_service.add_action_cache_instance(instance_name, action_cache_instance)
|
|
240 |
+ if execution_instance:
|
|
241 |
+ self._capabilities_service.add_execution_instance(instance_name, execution_instance)
|
|
242 |
+ |
|
243 |
+ except KeyError:
|
|
244 |
+ capabilities_instance = CapabilitiesInstance(cas_instance,
|
|
245 |
+ action_cache_instance,
|
|
246 |
+ execution_instance)
|
|
247 |
+ self._capabilities_service.add_instance(instance_name, capabilities_instance)
|
|
248 |
+ |
|
249 |
+ # --- Public API: Monitoring ---
|
|
250 |
+ |
|
251 |
+ @property
|
|
252 |
+ def is_instrumented(self):
|
|
253 |
+ return self._is_instrumented
|
... | ... | @@ -30,6 +30,14 @@ def get_hostname(): |
30 | 30 |
return socket.gethostname()
|
31 | 31 |
|
32 | 32 |
|
33 |
+def get_hash_type():
|
|
34 |
+ """Returns the hash type."""
|
|
35 |
+ hash_name = HASH().name
|
|
36 |
+ if hash_name == "sha256":
|
|
37 |
+ return remote_execution_pb2.SHA256
|
|
38 |
+ return remote_execution_pb2.UNKNOWN
|
|
39 |
+ |
|
40 |
+ |
|
33 | 41 |
def create_digest(bytes_to_digest):
|
34 | 42 |
"""Computes the :obj:`Digest` of a piece of data.
|
35 | 43 |
|
... | ... | @@ -137,7 +137,7 @@ def test_bytestream_write(mocked, instance, extra_data): |
137 | 137 |
bytestream_pb2.WriteRequest(data=b'def', write_offset=3, finish_write=True)
|
138 | 138 |
]
|
139 | 139 |
|
140 |
- response = servicer.Write(requests, context)
|
|
140 |
+ response = servicer.Write(iter(requests), context)
|
|
141 | 141 |
assert response.committed_size == 6
|
142 | 142 |
assert len(storage.data) == 1
|
143 | 143 |
assert (hash_, 6) in storage.data
|
... | ... | @@ -159,10 +159,10 @@ def test_bytestream_write_rejects_wrong_hash(mocked): |
159 | 159 |
bytestream_pb2.WriteRequest(resource_name=resource_name, data=data, finish_write=True)
|
160 | 160 |
]
|
161 | 161 |
|
162 |
- servicer.Write(requests, context)
|
|
162 |
+ servicer.Write(iter(requests), context)
|
|
163 | 163 |
context.set_code.assert_called_once_with(grpc.StatusCode.INVALID_ARGUMENT)
|
164 | 164 |
|
165 |
- assert len(storage.data) is 0
|
|
165 |
+ assert not storage.data
|
|
166 | 166 |
|
167 | 167 |
|
168 | 168 |
@pytest.mark.parametrize("instance", instances)
|
1 |
+# Copyright (C) 2018 Bloomberg LP
|
|
2 |
+#
|
|
3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+# you may not use this file except in compliance with the License.
|
|
5 |
+# You may obtain a copy of the License at
|
|
6 |
+#
|
|
7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
+#
|
|
9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+# See the License for the specific language governing permissions and
|
|
13 |
+# limitations under the License.
|
|
14 |
+ |
|
15 |
+# pylint: disable=redefined-outer-name
|
|
16 |
+ |
|
17 |
+ |
|
18 |
+import grpc
|
|
19 |
+import pytest
|
|
20 |
+ |
|
21 |
+from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
|
22 |
+from buildgrid.client.capabilities import CapabilitiesInterface
|
|
23 |
+from buildgrid.server.controller import ExecutionController
|
|
24 |
+from buildgrid.server.actioncache.storage import ActionCache
|
|
25 |
+from buildgrid.server.cas.instance import ContentAddressableStorageInstance
|
|
26 |
+from buildgrid.server.cas.storage.lru_memory_cache import LRUMemoryCache
|
|
27 |
+ |
|
28 |
+from ..utils.utils import run_in_subprocess
|
|
29 |
+from ..utils.capabilities import serve_capabilities_service
|
|
30 |
+ |
|
31 |
+ |
|
32 |
+INSTANCES = ['', 'instance']
|
|
33 |
+ |
|
34 |
+ |
|
35 |
+# Use subprocess to avoid creation of gRPC threads in main process
|
|
36 |
+# See https://github.com/grpc/grpc/blob/master/doc/fork_support.md
|
|
37 |
+# Multiprocessing uses pickle which protobufs don't work with
|
|
38 |
+# Workaround wrapper to send messages as strings
|
|
39 |
+class ServerInterface:
|
|
40 |
+ |
|
41 |
+ def __init__(self, remote):
|
|
42 |
+ self.__remote = remote
|
|
43 |
+ |
|
44 |
+ def get_capabilities(self, instance_name):
|
|
45 |
+ |
|
46 |
+ def __get_capabilities(queue, remote, instance_name):
|
|
47 |
+ interface = CapabilitiesInterface(grpc.insecure_channel(remote))
|
|
48 |
+ |
|
49 |
+ result = interface.get_capabilities(instance_name)
|
|
50 |
+ queue.put(result.SerializeToString())
|
|
51 |
+ |
|
52 |
+ result = run_in_subprocess(__get_capabilities,
|
|
53 |
+ self.__remote, instance_name)
|
|
54 |
+ |
|
55 |
+ capabilities = remote_execution_pb2.ServerCapabilities()
|
|
56 |
+ capabilities.ParseFromString(result)
|
|
57 |
+ return capabilities
|
|
58 |
+ |
|
59 |
+ |
|
60 |
+@pytest.mark.parametrize('instance', INSTANCES)
|
|
61 |
+def test_execution_not_available_capabilities(instance):
|
|
62 |
+ with serve_capabilities_service([instance]) as server:
|
|
63 |
+ server_interface = ServerInterface(server.remote)
|
|
64 |
+ response = server_interface.get_capabilities(instance)
|
|
65 |
+ |
|
66 |
+ assert not response.execution_capabilities.exec_enabled
|
|
67 |
+ |
|
68 |
+ |
|
69 |
+@pytest.mark.parametrize('instance', INSTANCES)
|
|
70 |
+def test_execution_available_capabilities(instance):
|
|
71 |
+ controller = ExecutionController()
|
|
72 |
+ |
|
73 |
+ with serve_capabilities_service([instance],
|
|
74 |
+ execution_instance=controller.execution_instance) as server:
|
|
75 |
+ server_interface = ServerInterface(server.remote)
|
|
76 |
+ response = server_interface.get_capabilities(instance)
|
|
77 |
+ |
|
78 |
+ assert response.execution_capabilities.exec_enabled
|
|
79 |
+ assert response.execution_capabilities.digest_function
|
|
80 |
+ |
|
81 |
+ |
|
82 |
+@pytest.mark.parametrize('instance', INSTANCES)
|
|
83 |
+def test_action_cache_allow_updates_capabilities(instance):
|
|
84 |
+ storage = LRUMemoryCache(limit=256)
|
|
85 |
+ action_cache = ActionCache(storage, max_cached_refs=256, allow_updates=True)
|
|
86 |
+ |
|
87 |
+ with serve_capabilities_service([instance],
|
|
88 |
+ action_cache_instance=action_cache) as server:
|
|
89 |
+ server_interface = ServerInterface(server.remote)
|
|
90 |
+ response = server_interface.get_capabilities(instance)
|
|
91 |
+ |
|
92 |
+ assert response.cache_capabilities.action_cache_update_capabilities.update_enabled
|
|
93 |
+ |
|
94 |
+ |
|
95 |
+@pytest.mark.parametrize('instance', INSTANCES)
|
|
96 |
+def test_action_cache_not_allow_updates_capabilities(instance):
|
|
97 |
+ storage = LRUMemoryCache(limit=256)
|
|
98 |
+ action_cache = ActionCache(storage, max_cached_refs=256, allow_updates=False)
|
|
99 |
+ |
|
100 |
+ with serve_capabilities_service([instance],
|
|
101 |
+ action_cache_instance=action_cache) as server:
|
|
102 |
+ server_interface = ServerInterface(server.remote)
|
|
103 |
+ response = server_interface.get_capabilities(instance)
|
|
104 |
+ |
|
105 |
+ assert not response.cache_capabilities.action_cache_update_capabilities.update_enabled
|
|
106 |
+ |
|
107 |
+ |
|
108 |
+@pytest.mark.parametrize('instance', INSTANCES)
|
|
109 |
+def test_cas_capabilities(instance):
|
|
110 |
+ cas = ContentAddressableStorageInstance(None)
|
|
111 |
+ |
|
112 |
+ with serve_capabilities_service([instance],
|
|
113 |
+ cas_instance=cas) as server:
|
|
114 |
+ server_interface = ServerInterface(server.remote)
|
|
115 |
+ response = server_interface.get_capabilities(instance)
|
|
116 |
+ |
|
117 |
+ assert len(response.cache_capabilities.digest_function) == 1
|
|
118 |
+ assert response.cache_capabilities.digest_function[0]
|
|
119 |
+ assert response.cache_capabilities.symlink_absolute_path_strategy
|
|
120 |
+ assert response.cache_capabilities.max_batch_total_size_bytes
|
... | ... | @@ -169,7 +169,7 @@ def test_list_operations_empty(instance, context): |
169 | 169 |
|
170 | 170 |
response = instance.ListOperations(request, context)
|
171 | 171 |
|
172 |
- assert len(response.operations) is 0
|
|
172 |
+ assert not response.operations
|
|
173 | 173 |
|
174 | 174 |
|
175 | 175 |
# Send execution off, delete, try to find operation should fail
|
... | ... | @@ -222,7 +222,7 @@ def test_cancel_operation(instance, controller, execute_request, context): |
222 | 222 |
request = operations_pb2.ListOperationsRequest(name=instance_name)
|
223 | 223 |
response = instance.ListOperations(request, context)
|
224 | 224 |
|
225 |
- assert len(response.operations) is 1
|
|
225 |
+ assert len(response.operations) == 1
|
|
226 | 226 |
|
227 | 227 |
for operation in response.operations:
|
228 | 228 |
operation_metadata = remote_execution_pb2.ExecuteOperationMetadata()
|
... | ... | @@ -13,19 +13,24 @@ |
13 | 13 |
# limitations under the License.
|
14 | 14 |
|
15 | 15 |
|
16 |
-from buildgrid._app.settings import parser
|
|
17 |
-from buildgrid._app.commands.cmd_server import _create_server_from_config
|
|
18 |
-from buildgrid.server.cas.service import ByteStreamService, ContentAddressableStorageService
|
|
19 |
-from buildgrid.server.actioncache.service import ActionCacheService
|
|
20 |
-from buildgrid.server.execution.service import ExecutionService
|
|
21 |
-from buildgrid.server.operations.service import OperationsService
|
|
22 |
-from buildgrid.server.bots.service import BotsService
|
|
23 |
-from buildgrid.server.referencestorage.service import ReferenceStorageService
|
|
16 |
+import grpc
|
|
17 |
+ |
|
18 |
+from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2
|
|
19 |
+from buildgrid._protos.build.bazel.remote.execution.v2 import remote_execution_pb2_grpc
|
|
20 |
+from buildgrid._protos.buildstream.v2 import buildstream_pb2
|
|
21 |
+from buildgrid._protos.buildstream.v2 import buildstream_pb2_grpc
|
|
22 |
+from buildgrid._protos.google.bytestream import bytestream_pb2
|
|
23 |
+from buildgrid._protos.google.bytestream import bytestream_pb2_grpc
|
|
24 |
+from buildgrid._protos.google.devtools.remoteworkers.v1test2 import bots_pb2
|
|
25 |
+from buildgrid._protos.google.devtools.remoteworkers.v1test2 import bots_pb2_grpc
|
|
26 |
+from buildgrid._protos.google.longrunning import operations_pb2
|
|
27 |
+from buildgrid._protos.google.longrunning import operations_pb2_grpc
|
|
24 | 28 |
|
25 | 29 |
from .utils.utils import run_in_subprocess
|
30 |
+from .utils.server import serve
|
|
26 | 31 |
|
27 | 32 |
|
28 |
-config = """
|
|
33 |
+CONFIGURATION = """
|
|
29 | 34 |
server:
|
30 | 35 |
- !channel
|
31 | 36 |
port: 50051
|
... | ... | @@ -72,24 +77,103 @@ instances: |
72 | 77 |
|
73 | 78 |
def test_create_server():
|
74 | 79 |
# Actual test function, to be run in a subprocess:
|
75 |
- def __test_create_server(queue, config_data):
|
|
76 |
- settings = parser.get_parser().safe_load(config)
|
|
77 |
- server = _create_server_from_config(settings)
|
|
80 |
+ def __test_create_server(queue, remote):
|
|
81 |
+ # Open a channel to the remote server:
|
|
82 |
+ channel = grpc.insecure_channel(remote)
|
|
78 | 83 |
|
79 |
- server.start()
|
|
80 |
- server.stop()
|
|
84 |
+ try:
|
|
85 |
+ stub = remote_execution_pb2_grpc.ExecutionStub(channel)
|
|
86 |
+ request = remote_execution_pb2.ExecuteRequest(instance_name='main')
|
|
87 |
+ response = next(stub.Execute(request))
|
|
88 |
+ |
|
89 |
+ assert response.DESCRIPTOR is operations_pb2.Operation.DESCRIPTOR
|
|
90 |
+ |
|
91 |
+ except grpc.RpcError as e:
|
|
92 |
+ if e.code() == grpc.StatusCode.UNIMPLEMENTED:
|
|
93 |
+ queue.put(False)
|
|
94 |
+ except AssertionError:
|
|
95 |
+ queue.put(False)
|
|
96 |
+ |
|
97 |
+ try:
|
|
98 |
+ stub = remote_execution_pb2_grpc.ActionCacheStub(channel)
|
|
99 |
+ request = remote_execution_pb2.GetActionResultRequest(instance_name='main')
|
|
100 |
+ response = stub.GetActionResult(request)
|
|
101 |
+ |
|
102 |
+ assert response.DESCRIPTOR is remote_execution_pb2.ActionResult.DESCRIPTOR
|
|
103 |
+ |
|
104 |
+ except grpc.RpcError as e:
|
|
105 |
+ if e.code() == grpc.StatusCode.UNIMPLEMENTED:
|
|
106 |
+ queue.put(False)
|
|
107 |
+ except AssertionError:
|
|
108 |
+ queue.put(False)
|
|
109 |
+ |
|
110 |
+ try:
|
|
111 |
+ stub = remote_execution_pb2_grpc.ContentAddressableStorageStub(channel)
|
|
112 |
+ request = remote_execution_pb2.BatchUpdateBlobsRequest(instance_name='main')
|
|
113 |
+ response = stub.BatchUpdateBlobs(request)
|
|
114 |
+ |
|
115 |
+ assert response.DESCRIPTOR is remote_execution_pb2.BatchUpdateBlobsResponse.DESCRIPTOR
|
|
116 |
+ |
|
117 |
+ except grpc.RpcError as e:
|
|
118 |
+ if e.code() == grpc.StatusCode.UNIMPLEMENTED:
|
|
119 |
+ queue.put(False)
|
|
120 |
+ except AssertionError:
|
|
121 |
+ queue.put(False)
|
|
81 | 122 |
|
82 | 123 |
try:
|
83 |
- assert isinstance(server._execution_service, ExecutionService)
|
|
84 |
- assert isinstance(server._operations_service, OperationsService)
|
|
85 |
- assert isinstance(server._bots_service, BotsService)
|
|
86 |
- assert isinstance(server._reference_storage_service, ReferenceStorageService)
|
|
87 |
- assert isinstance(server._action_cache_service, ActionCacheService)
|
|
88 |
- assert isinstance(server._cas_service, ContentAddressableStorageService)
|
|
89 |
- assert isinstance(server._bytestream_service, ByteStreamService)
|
|
124 |
+ stub = buildstream_pb2_grpc.ReferenceStorageStub(channel)
|
|
125 |
+ request = buildstream_pb2.GetReferenceRequest(instance_name='main')
|
|
126 |
+ response = stub.GetReference(request)
|
|
127 |
+ |
|
128 |
+ assert response.DESCRIPTOR is buildstream_pb2.GetReferenceResponse.DESCRIPTOR
|
|
129 |
+ |
|
130 |
+ except grpc.RpcError as e:
|
|
131 |
+ if e.code() == grpc.StatusCode.UNIMPLEMENTED:
|
|
132 |
+ queue.put(False)
|
|
90 | 133 |
except AssertionError:
|
91 | 134 |
queue.put(False)
|
92 |
- else:
|
|
93 |
- queue.put(True)
|
|
94 | 135 |
|
95 |
- assert run_in_subprocess(__test_create_server, config)
|
|
136 |
+ try:
|
|
137 |
+ stub = bytestream_pb2_grpc.ByteStreamStub(channel)
|
|
138 |
+ request = bytestream_pb2.ReadRequest()
|
|
139 |
+ response = stub.Read(request)
|
|
140 |
+ |
|
141 |
+ assert next(response).DESCRIPTOR is bytestream_pb2.ReadResponse.DESCRIPTOR
|
|
142 |
+ |
|
143 |
+ except grpc.RpcError as e:
|
|
144 |
+ if e.code() == grpc.StatusCode.UNIMPLEMENTED:
|
|
145 |
+ queue.put(False)
|
|
146 |
+ |
|
147 |
+ except AssertionError:
|
|
148 |
+ queue.put(False)
|
|
149 |
+ |
|
150 |
+ try:
|
|
151 |
+ stub = operations_pb2_grpc.OperationsStub(channel)
|
|
152 |
+ request = operations_pb2.ListOperationsRequest(name='main')
|
|
153 |
+ response = stub.ListOperations(request)
|
|
154 |
+ |
|
155 |
+ assert response.DESCRIPTOR is operations_pb2.ListOperationsResponse.DESCRIPTOR
|
|
156 |
+ |
|
157 |
+ except grpc.RpcError as e:
|
|
158 |
+ if e.code() == grpc.StatusCode.UNIMPLEMENTED:
|
|
159 |
+ queue.put(False)
|
|
160 |
+ except AssertionError:
|
|
161 |
+ queue.put(False)
|
|
162 |
+ |
|
163 |
+ try:
|
|
164 |
+ stub = bots_pb2_grpc.BotsStub(channel)
|
|
165 |
+ request = bots_pb2.CreateBotSessionRequest()
|
|
166 |
+ response = stub.CreateBotSession(request)
|
|
167 |
+ |
|
168 |
+ assert response.DESCRIPTOR is bots_pb2.BotSession.DESCRIPTOR
|
|
169 |
+ |
|
170 |
+ except grpc.RpcError as e:
|
|
171 |
+ if e.code() == grpc.StatusCode.UNIMPLEMENTED:
|
|
172 |
+ queue.put(False)
|
|
173 |
+ except AssertionError:
|
|
174 |
+ queue.put(False)
|
|
175 |
+ |
|
176 |
+ queue.put(True)
|
|
177 |
+ |
|
178 |
+ with serve(CONFIGURATION) as server:
|
|
179 |
+ assert run_in_subprocess(__test_create_server, server.remote)
|
1 |
+# Copyright (C) 2018 Bloomberg LP
|
|
2 |
+#
|
|
3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+# you may not use this file except in compliance with the License.
|
|
5 |
+# You may obtain a copy of the License at
|
|
6 |
+#
|
|
7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
+#
|
|
9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+# See the License for the specific language governing permissions and
|
|
13 |
+# limitations under the License.
|
|
14 |
+ |
|
15 |
+ |
|
16 |
+from concurrent import futures
|
|
17 |
+from contextlib import contextmanager
|
|
18 |
+import multiprocessing
|
|
19 |
+import os
|
|
20 |
+import signal
|
|
21 |
+ |
|
22 |
+import grpc
|
|
23 |
+import pytest_cov
|
|
24 |
+ |
|
25 |
+from buildgrid.server.capabilities.service import CapabilitiesService
|
|
26 |
+from buildgrid.server.capabilities.instance import CapabilitiesInstance
|
|
27 |
+ |
|
28 |
+ |
|
29 |
+@contextmanager
|
|
30 |
+def serve_capabilities_service(instances,
|
|
31 |
+ cas_instance=None,
|
|
32 |
+ action_cache_instance=None,
|
|
33 |
+ execution_instance=None):
|
|
34 |
+ server = Server(instances,
|
|
35 |
+ cas_instance,
|
|
36 |
+ action_cache_instance,
|
|
37 |
+ execution_instance)
|
|
38 |
+ try:
|
|
39 |
+ yield server
|
|
40 |
+ finally:
|
|
41 |
+ server.quit()
|
|
42 |
+ |
|
43 |
+ |
|
44 |
+class Server:
|
|
45 |
+ |
|
46 |
+ def __init__(self, instances,
|
|
47 |
+ cas_instance=None,
|
|
48 |
+ action_cache_instance=None,
|
|
49 |
+ execution_instance=None):
|
|
50 |
+ self.instances = instances
|
|
51 |
+ |
|
52 |
+ self.__queue = multiprocessing.Queue()
|
|
53 |
+ self.__process = multiprocessing.Process(
|
|
54 |
+ target=Server.serve,
|
|
55 |
+ args=(self.__queue, self.instances, cas_instance, action_cache_instance, execution_instance))
|
|
56 |
+ self.__process.start()
|
|
57 |
+ |
|
58 |
+ self.port = self.__queue.get(timeout=1)
|
|
59 |
+ self.remote = 'localhost:{}'.format(self.port)
|
|
60 |
+ |
|
61 |
+ @staticmethod
|
|
62 |
+ def serve(queue, instances, cas_instance, action_cache_instance, execution_instance):
|
|
63 |
+ pytest_cov.embed.cleanup_on_sigterm()
|
|
64 |
+ |
|
65 |
+ # Use max_workers default from Python 3.5+
|
|
66 |
+ max_workers = (os.cpu_count() or 1) * 5
|
|
67 |
+ server = grpc.server(futures.ThreadPoolExecutor(max_workers))
|
|
68 |
+ port = server.add_insecure_port('localhost:0')
|
|
69 |
+ |
|
70 |
+ capabilities_service = CapabilitiesService(server)
|
|
71 |
+ for name in instances:
|
|
72 |
+ capabilities_instance = CapabilitiesInstance(cas_instance, action_cache_instance, execution_instance)
|
|
73 |
+ capabilities_service.add_instance(name, capabilities_instance)
|
|
74 |
+ |
|
75 |
+ server.start()
|
|
76 |
+ queue.put(port)
|
|
77 |
+ signal.pause()
|
|
78 |
+ |
|
79 |
+ def quit(self):
|
|
80 |
+ if self.__process:
|
|
81 |
+ self.__process.terminate()
|
|
82 |
+ self.__process.join()
|
1 |
+# Copyright (C) 2018 Bloomberg LP
|
|
2 |
+#
|
|
3 |
+# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4 |
+# you may not use this file except in compliance with the License.
|
|
5 |
+# You may obtain a copy of the License at
|
|
6 |
+#
|
|
7 |
+# <http://www.apache.org/licenses/LICENSE-2.0>
|
|
8 |
+#
|
|
9 |
+# Unless required by applicable law or agreed to in writing, software
|
|
10 |
+# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11 |
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12 |
+# See the License for the specific language governing permissions and
|
|
13 |
+# limitations under the License.
|
|
14 |
+ |
|
15 |
+ |
|
16 |
+from contextlib import contextmanager
|
|
17 |
+import multiprocessing
|
|
18 |
+import signal
|
|
19 |
+ |
|
20 |
+import pytest_cov
|
|
21 |
+ |
|
22 |
+from buildgrid._app.settings import parser
|
|
23 |
+from buildgrid.server.instance import BuildGridServer
|
|
24 |
+ |
|
25 |
+ |
|
26 |
+@contextmanager
|
|
27 |
+def serve(configuration):
|
|
28 |
+ server = Server(configuration)
|
|
29 |
+ try:
|
|
30 |
+ yield server
|
|
31 |
+ finally:
|
|
32 |
+ server.quit()
|
|
33 |
+ |
|
34 |
+ |
|
35 |
+class Server:
|
|
36 |
+ |
|
37 |
+ def __init__(self, configuration):
|
|
38 |
+ |
|
39 |
+ self.configuration = configuration
|
|
40 |
+ |
|
41 |
+ self.__queue = multiprocessing.Queue()
|
|
42 |
+ self.__process = multiprocessing.Process(
|
|
43 |
+ target=Server.serve,
|
|
44 |
+ args=(self.__queue, self.configuration))
|
|
45 |
+ self.__process.start()
|
|
46 |
+ |
|
47 |
+ self.port = self.__queue.get()
|
|
48 |
+ self.remote = 'localhost:{}'.format(self.port)
|
|
49 |
+ |
|
50 |
+ @classmethod
|
|
51 |
+ def serve(cls, queue, configuration):
|
|
52 |
+ pytest_cov.embed.cleanup_on_sigterm()
|
|
53 |
+ |
|
54 |
+ server = BuildGridServer()
|
|
55 |
+ |
|
56 |
+ def __signal_handler(signum, frame):
|
|
57 |
+ server.stop()
|
|
58 |
+ |
|
59 |
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
|
|
60 |
+ signal.signal(signal.SIGTERM, __signal_handler)
|
|
61 |
+ |
|
62 |
+ instances = parser.get_parser().safe_load(configuration)['instances']
|
|
63 |
+ for instance in instances:
|
|
64 |
+ instance_name = instance['name']
|
|
65 |
+ services = instance['services']
|
|
66 |
+ for service in services:
|
|
67 |
+ service.register_instance_with_server(instance_name, server)
|
|
68 |
+ |
|
69 |
+ port = server.add_port('localhost:0', None)
|
|
70 |
+ |
|
71 |
+ queue.put(port)
|
|
72 |
+ |
|
73 |
+ server.start()
|
|
74 |
+ |
|
75 |
+ def quit(self):
|
|
76 |
+ if self.__process:
|
|
77 |
+ self.__process.terminate()
|
|
78 |
+ self.__process.join()
|