@@ -27,16 +27,17 @@
_sym_db = _symbol_database.Default()


+from tensorflow_serving.apis import get_model_metadata_pb2 as tensorflow__serving_dot_apis_dot_get__model__metadata__pb2
from tensorflow_serving.apis import predict_pb2 as tensorflow__serving_dot_apis_dot_predict__pb2


DESCRIPTOR = _descriptor.FileDescriptor(
  name='tensorflow_serving/apis/prediction_service.proto',
  package='tensorflow.serving',
  syntax='proto3',
-  serialized_pb=_b('\n0tensorflow_serving/apis/prediction_service.proto\x12\x12tensorflow.serving\x1a%tensorflow_serving/apis/predict.proto2g\n\x11PredictionService\x12R\n\x07Predict\x12\".tensorflow.serving.PredictRequest\x1a#.tensorflow.serving.PredictResponseB\x03\xf8\x01\x01\x62\x06proto3')
+  serialized_pb=_b('\n0tensorflow_serving/apis/prediction_service.proto\x12\x12tensorflow.serving\x1a\x30tensorflow_serving/apis/get_model_metadata.proto\x1a%tensorflow_serving/apis/predict.proto2\xd6\x01\n\x11PredictionService\x12R\n\x07Predict\x12\".tensorflow.serving.PredictRequest\x1a#.tensorflow.serving.PredictResponse\x12m\n\x10GetModelMetadata\x12+.tensorflow.serving.GetModelMetadataRequest\x1a,.tensorflow.serving.GetModelMetadataResponseB\x03\xf8\x01\x01\x62\x06proto3')
  ,
-  dependencies=[tensorflow__serving_dot_apis_dot_predict__pb2.DESCRIPTOR,])
+  dependencies=[tensorflow__serving_dot_apis_dot_get__model__metadata__pb2.DESCRIPTOR,tensorflow__serving_dot_apis_dot_predict__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)


@@ -54,7 +55,8 @@


class PredictionServiceStub(object):
-  """PredictionService provides access to machine-learned models loaded by
+  """open source marker; do not remove
+  PredictionService provides access to machine-learned models loaded by
  model_servers.
  """

@@ -69,10 +71,16 @@ def __init__(self, channel):
        request_serializer=tensorflow__serving_dot_apis_dot_predict__pb2.PredictRequest.SerializeToString,
        response_deserializer=tensorflow__serving_dot_apis_dot_predict__pb2.PredictResponse.FromString,
        )
+    self.GetModelMetadata = channel.unary_unary(
+        '/tensorflow.serving.PredictionService/GetModelMetadata',
+        request_serializer=tensorflow__serving_dot_apis_dot_get__model__metadata__pb2.GetModelMetadataRequest.SerializeToString,
+        response_deserializer=tensorflow__serving_dot_apis_dot_get__model__metadata__pb2.GetModelMetadataResponse.FromString,
+        )


class PredictionServiceServicer(object):
-  """PredictionService provides access to machine-learned models loaded by
+  """open source marker; do not remove
+  PredictionService provides access to machine-learned models loaded by
  model_servers.
  """

@@ -83,6 +91,13 @@ def Predict(self, request, context):
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

+  def GetModelMetadata(self, request, context):
+    """GetModelMetadata - provides access to metadata for loaded models.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+

def add_PredictionServiceServicer_to_server(servicer, server):
  rpc_method_handlers = {
@@ -91,6 +106,11 @@ def add_PredictionServiceServicer_to_server(servicer, server):
          request_deserializer=tensorflow__serving_dot_apis_dot_predict__pb2.PredictRequest.FromString,
          response_serializer=tensorflow__serving_dot_apis_dot_predict__pb2.PredictResponse.SerializeToString,
      ),
+      'GetModelMetadata': grpc.unary_unary_rpc_method_handler(
+          servicer.GetModelMetadata,
+          request_deserializer=tensorflow__serving_dot_apis_dot_get__model__metadata__pb2.GetModelMetadataRequest.FromString,
+          response_serializer=tensorflow__serving_dot_apis_dot_get__model__metadata__pb2.GetModelMetadataResponse.SerializeToString,
+      ),
  }
  generic_handler = grpc.method_handlers_generic_handler(
      'tensorflow.serving.PredictionService', rpc_method_handlers)
@@ -103,13 +123,18 @@ class BetaPredictionServiceServicer(object):
  It is recommended to use the GA API (classes and functions in this
  file not marked beta) for all further purposes. This class was generated
  only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
-  """PredictionService provides access to machine-learned models loaded by
+  """open source marker; do not remove
+  PredictionService provides access to machine-learned models loaded by
  model_servers.
  """
  def Predict(self, request, context):
    """Predict -- provides access to loaded TensorFlow model.
    """
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
+  def GetModelMetadata(self, request, context):
+    """GetModelMetadata - provides access to metadata for loaded models.
+    """
+    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)


class BetaPredictionServiceStub(object):
@@ -118,14 +143,20 @@ class BetaPredictionServiceStub(object):
  It is recommended to use the GA API (classes and functions in this
  file not marked beta) for all further purposes. This class was generated
  only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
-  """PredictionService provides access to machine-learned models loaded by
+  """open source marker; do not remove
+  PredictionService provides access to machine-learned models loaded by
  model_servers.
  """
  def Predict(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Predict -- provides access to loaded TensorFlow model.
    """
    raise NotImplementedError()
  Predict.future = None
+  def GetModelMetadata(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
+    """GetModelMetadata - provides access to metadata for loaded models.
+    """
+    raise NotImplementedError()
+  GetModelMetadata.future = None


def beta_create_PredictionService_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
@@ -135,12 +166,15 @@ def beta_create_PredictionService_server(servicer, pool=None, pool_size=None, de
  file not marked beta) for all further purposes. This function was
  generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
  request_deserializers = {
+    ('tensorflow.serving.PredictionService', 'GetModelMetadata'): tensorflow__serving_dot_apis_dot_get__model__metadata__pb2.GetModelMetadataRequest.FromString,
    ('tensorflow.serving.PredictionService', 'Predict'): tensorflow__serving_dot_apis_dot_predict__pb2.PredictRequest.FromString,
  }
  response_serializers = {
+    ('tensorflow.serving.PredictionService', 'GetModelMetadata'): tensorflow__serving_dot_apis_dot_get__model__metadata__pb2.GetModelMetadataResponse.SerializeToString,
    ('tensorflow.serving.PredictionService', 'Predict'): tensorflow__serving_dot_apis_dot_predict__pb2.PredictResponse.SerializeToString,
  }
  method_implementations = {
+    ('tensorflow.serving.PredictionService', 'GetModelMetadata'): face_utilities.unary_unary_inline(servicer.GetModelMetadata),
    ('tensorflow.serving.PredictionService', 'Predict'): face_utilities.unary_unary_inline(servicer.Predict),
  }
  server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout)
@@ -154,13 +188,17 @@ def beta_create_PredictionService_stub(channel, host=None, metadata_transformer=
  file not marked beta) for all further purposes. This function was
  generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
  request_serializers = {
+    ('tensorflow.serving.PredictionService', 'GetModelMetadata'): tensorflow__serving_dot_apis_dot_get__model__metadata__pb2.GetModelMetadataRequest.SerializeToString,
    ('tensorflow.serving.PredictionService', 'Predict'): tensorflow__serving_dot_apis_dot_predict__pb2.PredictRequest.SerializeToString,
  }
  response_deserializers = {
+    ('tensorflow.serving.PredictionService', 'GetModelMetadata'): tensorflow__serving_dot_apis_dot_get__model__metadata__pb2.GetModelMetadataResponse.FromString,
    ('tensorflow.serving.PredictionService', 'Predict'): tensorflow__serving_dot_apis_dot_predict__pb2.PredictResponse.FromString,
  }
  cardinalities = {
+    'GetModelMetadata': cardinality.Cardinality.UNARY_UNARY,
    'Predict': cardinality.Cardinality.UNARY_UNARY,
  }
  stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size)
  return beta_implementations.dynamic_stub(channel, 'tensorflow.serving.PredictionService', cardinalities, options=stub_options)
+# @@protoc_insertion_point(module_scope)
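
Illustrative usage (not part of the generated module or of this change): a minimal sketch of exercising the newly added GetModelMetadata RPC through the GA classes above. The servicer subclass, the model name "my_model", and the port 8500 are assumptions for the example; it also assumes a grpcio version that provides the GA grpc.server/grpc.insecure_channel API.

# Illustrative only -- a sketch, not part of prediction_service_pb2.py.
from concurrent import futures

import grpc

from tensorflow_serving.apis import get_model_metadata_pb2
from tensorflow_serving.apis import prediction_service_pb2


class MetadataOnlyServicer(prediction_service_pb2.PredictionServiceServicer):
  """Toy servicer (hypothetical) that only answers GetModelMetadata."""

  def GetModelMetadata(self, request, context):
    # Echo the requested model name back in an otherwise empty response.
    response = get_model_metadata_pb2.GetModelMetadataResponse()
    response.model_spec.name = request.model_spec.name
    return response


def serve():
  # Register the servicer using the helper generated above.
  server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
  prediction_service_pb2.add_PredictionServiceServicer_to_server(
      MetadataOnlyServicer(), server)
  server.add_insecure_port('[::]:8500')  # assumed port
  server.start()
  return server


def query_metadata():
  # Client side: call the new stub method added in this change.
  channel = grpc.insecure_channel('localhost:8500')
  stub = prediction_service_pb2.PredictionServiceStub(channel)
  request = get_model_metadata_pb2.GetModelMetadataRequest()
  request.model_spec.name = 'my_model'  # hypothetical model name
  request.metadata_field.append('signature_def')
  return stub.GetModelMetadata(request, 10.0)  # 10 second deadline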