Merge pull request #888 from alainjobart/resharding

Resharding
This commit is contained in:
Alain Jobart 2015-07-17 11:20:38 -07:00
Родитель 3de9bdb49e b2a09f8f4b
Коммит d66ae6486d
41 изменённых файлов: 13759 добавлений и 311 удалений

Просмотреть файл

@ -181,18 +181,15 @@ v3_test:
bson:
go generate ./go/...
# This rule rebuilds all the go files from the proto definitions for gRPC
# FIXME(alainjobart) also add support for python gRPC stubs, right now
# it's only the proto files without gRPC
# This rule rebuilds all the go files from the proto definitions for gRPC.
# 1. list all proto files.
# 2. remove 'proto/' prefix and '.proto' suffix.
# 3. run protoc for each proto and put in go/vt/proto/${proto_file_name}
# 3. (go) run protoc for each proto and put in go/vt/proto/${proto_file_name}/
# 4. (python) run protoc for each proto and put in py/vtproto/
proto:
find proto -name '*.proto' -print | sed 's/^proto\///' | sed 's/\.proto//' | xargs -I{} $$VTROOT/dist/protobuf/bin/protoc -Iproto proto/{}.proto --go_out=plugins=grpc:go/vt/proto/{}
find go/vt/proto -name "*.pb.go" | xargs sed --in-place -r -e 's,import ([a-z0-9_]+) ".",import \1 "github.com/youtube/vitess/go/vt/proto/\1",g'
cd py/vtctl && $$VTROOT/dist/protobuf/bin/protoc -I../../proto ../../proto/logutil.proto --python_out=. --grpc_out=. --plugin=protoc-gen-grpc=$$VTROOT/dist/grpc/bin/grpc_python_plugin
cd py/vtctl && $$VTROOT/dist/protobuf/bin/protoc -I../../proto ../../proto/vtctldata.proto --python_out=. --grpc_out=. --plugin=protoc-gen-grpc=$$VTROOT/dist/grpc/bin/grpc_python_plugin
cd py/vtctl && $$VTROOT/dist/protobuf/bin/protoc -I../../proto ../../proto/vtctlservice.proto --python_out=. --grpc_out=. --plugin=protoc-gen-grpc=$$VTROOT/dist/grpc/bin/grpc_python_plugin
find proto -name '*.proto' -print | sed 's/^proto\///' | sed 's/\.proto//' | xargs -I{} $$VTROOT/dist/protobuf/bin/protoc -Iproto proto/{}.proto --python_out=py/vtproto --grpc_out=py/vtproto --plugin=protoc-gen-grpc=$$VTROOT/dist/grpc/bin/grpc_python_plugin
# This rule builds the bootstrap images for all flavors.
docker_bootstrap:

Просмотреть файл

@ -2,9 +2,6 @@
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
import logging
import re
from net import bsonrpc
from vtctl import vtctl_client
@ -22,7 +19,7 @@ class GoRpcVtctlClient(vtctl_client.VctlClient):
self.connected = False
def __str__(self):
return '<VtctlClient %s>' % self.addr
return '<GoRpcVtctlClient %s>' % self.addr
def dial(self):
if self.connected:
@ -38,40 +35,17 @@ class GoRpcVtctlClient(vtctl_client.VctlClient):
def is_closed(self):
return self.client.is_closed()
def execute_vtctl_command(self, args, action_timeout=30.0,
lock_timeout=5.0, info_to_debug=False):
"""Executes a remote command on the vtctl server.
Args:
args: Command line to run.
action_timeout: total timeout for the action (float, in seconds).
lock_timeout: timeout for locking topology (float, in seconds).
info_to_debug: if set, changes the info messages to debug.
Returns:
The console output of the action.
"""
def execute_vtctl_command(self, args, action_timeout=30.0, lock_timeout=5.0):
req = {
'Args': args,
'ActionTimeout': long(action_timeout * 1000000000),
'LockTimeout': long(lock_timeout * 1000000000),
}
self.client.stream_call('VtctlServer.ExecuteVtctlCommand', req)
console_result = ''
while True:
e = self.client.stream_next()
if e is None:
break
if e.reply['Level'] == 0:
if info_to_debug:
logging.debug('%s', e.reply['Value'])
else:
logging.info('%s', e.reply['Value'])
elif e.reply['Level'] == 1:
logging.warning('%s', e.reply['Value'])
elif e.reply['Level'] == 2:
logging.error('%s', e.reply['Value'])
elif e.reply['Level'] == 3:
console_result += e.reply['Value']
return console_result
yield vtctl_client.Event(e.reply['Time'], e.reply['Level'],
e.reply['File'], e.reply['Line'],
e.reply['Value'])

Просмотреть файл

@ -6,74 +6,63 @@
# It is untested and doesn't work just yet: ExecuteVtctlCommand
# just seems to time out.
import logging
import datetime
from urlparse import urlparse
import vtctl_client
import vtctldata_pb2
import vtctlservice_pb2
from vtproto import vtctldata_pb2
from vtproto import vtctlservice_pb2
class GRPCVtctlClient(vtctl_client.VctlClient):
"""GoRpcVtctlClient is the gRPC implementation of VctlClient.
It is registered as 'grpc' protocol.
"""
"""GoRpcVtctlClient is the gRPC implementation of VctlClient.
It is registered as 'grpc' protocol.
"""
def __init__(self, addr, timeout, user=None, password=None, encrypted=False,
keyfile=None, certfile=None):
self.addr = addr
self.timeout = timeout
self.stub = None
def __init__(self, addr, timeout):
self.addr = addr
self.timeout = timeout
self.stub = None
def __str__(self):
return '<VtctlClient %s>' % self.addr
def __str__(self):
return '<GRPCVtctlClient %s>' % self.addr
def dial(self):
if self.stub:
self.stub.close()
def dial(self):
if self.stub:
self.stub.close()
p = urlparse("http://" + self.addr)
self.stub = vtctlservice_pb2.early_adopter_create_Vtctl_stub(p.hostname,
p.port)
p = urlparse('http://' + self.addr)
self.stub = vtctlservice_pb2.early_adopter_create_Vtctl_stub(p.hostname,
p.port)
def close(self):
self.stub.close()
self.stub = None
def close(self):
self.stub.close()
self.stub = None
def is_closed(self):
return self.stub == None
def is_closed(self):
return self.stub == None
def execute_vtctl_command(self, args, action_timeout=30.0,
lock_timeout=5.0, info_to_debug=False):
"""Executes a remote command on the vtctl server.
def execute_vtctl_command(self, args, action_timeout=30.0, lock_timeout=5.0):
req = vtctldata_pb2.ExecuteVtctlCommandRequest(
args=args,
action_timeout=long(action_timeout * 1000000000),
lock_timeout=long(lock_timeout * 1000000000))
with self.stub as stub:
it = stub.ExecuteVtctlCommand(req, action_timeout)
for response in it:
t = datetime.datetime.utcfromtimestamp(response.event.time.seconds)
try:
yield vtctl_client.Event(t, response.event.level, response.event.file,
response.event.line, response.event.value)
except GeneratorExit:
# if the loop is interrupted for any reason, we need to
# cancel the iterator, so we close the RPC connection,
# and the with __exit__ statement is executed.
Args:
args: Command line to run.
action_timeout: total timeout for the action (float, in seconds).
lock_timeout: timeout for locking topology (float, in seconds).
info_to_debug: if set, changes the info messages to debug.
# FIXME(alainjobart) this is flaky. It sometimes doesn't stop
# the iterator, and we don't get out of the 'with'.
# Sending a Ctrl-C to the process then works for some reason.
it.cancel()
break
Returns:
The console output of the action.
"""
req = vtctldata_pb2.ExecuteVtctlCommandRequest(
args=args,
action_timeout=long(action_timeout * 1000000000),
lock_timeout=long(lock_timeout * 1000000000))
console_result = ''
with self.stub as stub:
for response in stub.ExecuteVtctlCommand(req, action_timeout):
if response.event.level == 0:
if info_to_debug:
logging.debug('%s', response.event.value)
else:
logging.info('%s', response.event.value)
elif response.event.level == 1:
logging.warning('%s', response.event.value)
elif response.event.level == 2:
logging.error('%s', response.event.value)
elif response.event.level == 3:
console_result += response.event.value
return console_result
vtctl_client.register_conn_class("grpc", GRPCVtctlClient)
vtctl_client.register_conn_class('grpc', GRPCVtctlClient)

Просмотреть файл

@ -21,14 +21,14 @@ def register_conn_class(protocol, c):
def connect(protocol, *pargs, **kargs):
"""connect will return a dialed VctlClient connection to a vtctl server.
"""connect will return a dialed VtctlClient connection to a vtctl server.
Args:
protocol: the registered protocol to use.
arsg: passed to the registered protocol __init__ method.
Returns:
A dialed VctlClient.
A dialed VtctlClient.
"""
if not protocol in vtctl_client_conn_classes:
raise Exception('Unknown vtclient protocol', protocol)
@ -37,6 +37,24 @@ def connect(protocol, *pargs, **kargs):
return conn
class Event(object):
"""Event is streamed by VctlClient.
Eventually, we will just use the proto3 definition for logutil.proto/Event.
"""
INFO = 0
WARNING = 1
ERROR = 2
CONSOLE = 3
def __init__(self, time, level, file, line, value):
self.time = time
self.level = level
self.file = file
self.line = line
self.value = value
class VctlClient(object):
"""VctlClient is the interface for the vtctl client implementations.
All implementations must implement all these methods.
@ -70,17 +88,49 @@ class VctlClient(object):
"""
pass
def execute_vtctl_command(self, args, action_timeout=30.0,
lock_timeout=5.0, info_to_debug=False):
def execute_vtctl_command(self, args, action_timeout=30.0, lock_timeout=5.0):
"""Executes a remote command on the vtctl server.
Args:
args: Command line to run.
action_timeout: total timeout for the action (float, in seconds).
lock_timeout: timeout for locking topology (float, in seconds).
info_to_debug: if set, changes the info messages to debug.
Returns:
The console output of the action.
This is a generator method that yields Event objects.
"""
pass
def execute_vtctl_command(client, args, action_timeout=30.0,
lock_timeout=5.0, info_to_debug=False):
"""This is a helper method that executes a remote vtctl command, logs
the output to the logging module, and returns the console output.
Args:
client: VtctlClient object to use.
args: Command line to run.
action_timeout: total timeout for the action (float, in seconds).
lock_timeout: timeout for locking topology (float, in seconds).
info_to_debug: if set, changes the info messages to debug.
Returns:
The console output of the action.
"""
console_result = ''
for e in client.execute_vtctl_command(args, action_timeout=action_timeout,
lock_timeout=lock_timeout):
if e.level == Event.INFO:
if info_to_debug:
logging.debug('%s', e.value)
else:
logging.info('%s', e.value)
elif e.level == Event.WARNING:
logging.warning('%s', e.value)
elif e.level == Event.ERROR:
logging.error('%s', e.value)
elif e.level == Event.CONSOLE:
console_result += e.value
return console_result

Просмотреть файл

@ -17,7 +17,6 @@ management.
import contextlib
import functools
import logging
from vtdb import dbexceptions
from vtdb import shard_constants

Просмотреть файл

@ -4,8 +4,6 @@ Vitess sharding scheme is range-sharded. Vitess supports routing for
other sharding schemes by allowing explicit shard_name addressing.
This implementation is not fully complete as yet.
"""
import logging
from vtdb import db_object
from vtdb import dbexceptions
from vtdb import shard_constants

Просмотреть файл

@ -6,7 +6,6 @@ extends the functionality for getting, creating, updating and deleting
the lookup relationship.
"""
import functools
import logging
import struct
from vtdb import db_object

Просмотреть файл

@ -5,7 +5,6 @@ for the common database operations is defined in DBObjectBase.
DBObjectUnsharded defines the cursor creation methods for the same.
"""
import functools
import logging
import struct
from vtdb import db_object

Просмотреть файл

@ -0,0 +1,106 @@
# Copyright 2015, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
from itertools import izip
import logging
from net import gorpc
from net import bsonrpc
from vtdb import dbexceptions
from vtdb import field_types
from vtdb import update_stream
def _make_row(row, conversions):
converted_row = []
for conversion_func, field_data in izip(conversions, row):
if field_data is None:
v = None
elif conversion_func:
v = conversion_func(field_data)
else:
v = field_data
converted_row.append(v)
return converted_row
class GoRpcUpdateStreamConnection(update_stream.UpdateStreamConnection):
"""GoRpcUpdateStreamConnection is the go rpc implementation of
UpdateStreamConnection.
It is registered as 'gorpc' protocol.
"""
def __init__(self, addr, timeout, user=None, password=None, encrypted=False,
keyfile=None, certfile=None):
self.addr = addr
self.timeout = timeout
self.client = bsonrpc.BsonRpcClient(addr, timeout, user=user,
password=password, encrypted=encrypted,
keyfile=keyfile, certfile=certfile)
self.connected = False
def __str__(self):
return '<GoRpcUpdateStreamConnection %s>' % self.addr
def dial(self):
if self.connected:
self.client.close()
self.client.dial()
self.connected = True
def close(self):
self.connected = False
self.client.close()
def is_closed(self):
return self.client.is_closed()
def stream_update(self, position, timeout=3600.0):
"""Note this implementation doesn't honor the timeout."""
try:
self.client.stream_call('UpdateStream.ServeUpdateStream',
{"Position": position})
while True:
response = self.client.stream_next()
if response is None:
break
reply = response.reply
str_category = reply['Category']
if str_category == 'DML':
category = update_stream.StreamEvent.DML
elif str_category == 'DDL':
category = update_stream.StreamEvent.DDL
elif str_category == 'POS':
category = update_stream.StreamEvent.POS
else:
category = update_stream.StreamEvent.ERR
fields = []
rows = []
if reply['PrimaryKeyFields']:
conversions = []
for field in reply['PrimaryKeyFields']:
fields.append(field['Name'])
conversions.append(field_types.conversions.get(field['Type']))
for pk_list in reply['PrimaryKeyValues']:
if not pk_list:
continue
row = tuple(_make_row(pk_list, conversions))
rows.append(row)
yield update_stream.StreamEvent(category, reply['TableName'],
fields, rows, reply['Sql'],
reply['Timestamp'],
reply['GTIDField'])
except gorpc.AppError as e:
raise dbexceptions.DatabaseError(*e.args)
except gorpc.GoRpcError as e:
raise dbexceptions.OperationalError(*e.args)
except:
raise
update_stream.register_conn_class('gorpc', GoRpcUpdateStreamConnection)

Просмотреть файл

@ -0,0 +1,106 @@
# Copyright 2015, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
from itertools import izip
import logging
from urlparse import urlparse
from vtdb import dbexceptions
from vtdb import field_types
from vtdb import update_stream
from vtproto import binlogdata_pb2
from vtproto import binlogservice_pb2
from vtproto import replicationdata_pb2
def _make_row(row, conversions):
converted_row = []
for conversion_func, field_data in izip(conversions, row):
if field_data is None:
v = None
elif conversion_func:
v = conversion_func(field_data)
else:
v = field_data
converted_row.append(v)
return converted_row
class GRPCUpdateStreamConnection(update_stream.UpdateStreamConnection):
"""GRPCUpdateStreamConnection is the gRPC implementation of
UpdateStreamConnection.
It is registered as 'grpc' protocol.
"""
def __init__(self, addr, timeout):
self.addr = addr
self.timeout = timeout
self.stub = None
def __str__(self):
return '<GRPCUpdateStreamConnection %s>' % self.addr
def dial(self):
p = urlparse('http://' + self.addr)
self.stub = binlogservice_pb2.early_adopter_create_UpdateStream_stub(
p.hostname, p.port)
def close(self):
self.stub = None
def is_closed(self):
return self.stub == None
def stream_update(self, position, timeout=3600.0):
# FIXME(alainjobart) the parameter we pass for position should be
# the string encoded one, we shouldn't need the structures.
if 'MariaDB' in position:
parts = position['MariaDB'].split("-")
domain = long(parts[0])
server = long(parts[1])
sequence = long(parts[2])
p3_position = replicationdata_pb2.Position(
mariadb_gtid=replicationdata_pb2.MariadbGtid(domain=domain,
server=server,
sequence=sequence))
else:
raise NotImplemented("Only MariaDB encoding is supported here for now")
req = binlogdata_pb2.StreamUpdateRequest(position=p3_position)
with self.stub as stub:
it = stub.StreamUpdate(req, timeout)
for response in it:
stream_event = response.stream_event
fields = []
rows = []
if stream_event.primary_key_fields:
conversions = []
for field in stream_event.primary_key_fields:
fields.append(field.name)
conversions.append(field_types.conversions.get(field.type))
for r in stream_event.primary_key_values:
row = tuple(_make_row(r.values, conversions))
rows.append(row)
try:
yield update_stream.StreamEvent(int(stream_event.category),
stream_event.table_name,
fields, rows,
stream_event.sql,
stream_event.timestamp,
stream_event.gtid)
except GeneratorExit:
# if the loop is interrupted for any reason, we need to
# cancel the iterator, so we close the RPC connection,
# and the with __exit__ statement is executed.
# FIXME(alainjobart) this is flaky. It sometimes doesn't stop
# the iterator, and we don't get out of the 'with'.
# Sending a Ctrl-C to the process then works for some reason.
it.cancel()
break
update_stream.register_conn_class('grpc', GRPCUpdateStreamConnection)

Просмотреть файл

@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
import logging
import random
from zk import zkocc

Просмотреть файл

@ -13,7 +13,6 @@
# soon.
#
import logging
import random
import time

106
py/vtdb/update_stream.py Normal file
Просмотреть файл

@ -0,0 +1,106 @@
# Copyright 2015, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
# mapping from protocol to python class. The protocol matches the string
# used by vttablet as a -binlog_player_protocol parameter.
update_stream_conn_classes = dict()
def register_conn_class(protocol, c):
"""Used by implementations to register themselves.
Args:
protocol: short string to document the protocol.
c: class to register.
"""
update_stream_conn_classes[protocol] = c
def connect(protocol, *pargs, **kargs):
"""connect will return a dialed UpdateStreamConnection connection to
an update stream server.
Args:
protocol: the registered protocol to use.
arsg: passed to the registered protocol __init__ method.
Returns:
A dialed UpdateStreamConnection.
"""
if not protocol in update_stream_conn_classes:
raise Exception('Unknown update stream protocol', protocol)
conn = update_stream_conn_classes[protocol](*pargs, **kargs)
conn.dial()
return conn
class StreamEvent(object):
"""StreamEvent describes a single event in the update stream.
Eventually we will use the proto3 definition object.
"""
ERR = 0
DML = 1
DDL = 2
POS = 3
def __init__(self, category, table_name, fields, rows, sql, timestamp,
position):
self.category = category
self.table_name = table_name
self.fields = fields
self.rows = rows
self.sql = sql
self.timestamp = timestamp
self.position = position
class UpdateStreamConnection(object):
"""UpdateStreamConnection is the interface for the update stream
client implementations.
All implementations must implement all these methods.
If something goes wrong with the connection, this object will be thrown out.
"""
def __init__(self, addr, timeout):
"""Initialize an update stream connection.
Args:
addr: server address. Can be protocol dependent.
timeout: connection timeout (float, in seconds).
"""
pass
def dial(self):
"""Dial to the server. If successful, call close() to close the connection.
"""
pass
def close(self):
"""Close the connection. This object may be re-used again by calling dial().
"""
pass
def is_closed(self):
"""Checks the connection status.
Returns:
True if this connection is closed.
"""
pass
def stream_update(self, position, timeout=3600.0):
"""Generator method to stream the updates from a given replication point.
Args:
position: Starting position to stream from.
timeout: Should stop streaming after we reach this timeout.
Returns:
This is a generator method that yields StreamEvent objects.
"""
pass

Просмотреть файл

@ -1,101 +0,0 @@
#! /usr/bin/python
from itertools import izip
import logging
from net import gorpc
from net import bsonrpc
from vtdb import dbexceptions
from vtdb import field_types
class Coord(object):
Position = None
ServerId = None
def __init__(self, replPos, server_id = None):
self.Position = replPos
self.ServerId = server_id
def _make_row(row, conversions):
converted_row = []
for conversion_func, field_data in izip(conversions, row):
if field_data is None:
v = None
elif conversion_func:
v = conversion_func(field_data)
else:
v = field_data
converted_row.append(v)
return converted_row
class EventData(object):
Category = None
TableName = None
PrimaryKeyFields = None
PrimaryKeyValues = None
Sql = None
Timestamp = None
GTIDField = None
def __init__(self, raw_response):
for key, val in raw_response.iteritems():
self.__dict__[key] = val
self.PkRows = []
del self.__dict__['PrimaryKeyFields']
del self.__dict__['PrimaryKeyValues']
# build the conversions
if not raw_response['PrimaryKeyFields']:
return
self.Fields = []
conversions = []
for field in raw_response['PrimaryKeyFields']:
self.Fields.append(field['Name'])
conversions.append(field_types.conversions.get(field['Type']))
# and parse the results
for pkList in raw_response['PrimaryKeyValues']:
if not pkList:
continue
pk_row = tuple(_make_row(pkList, conversions))
self.PkRows.append(pk_row)
class UpdateStreamConnection(object):
def __init__(self, addr, timeout, user=None, password=None, encrypted=False, keyfile=None, certfile=None):
self.client = bsonrpc.BsonRpcClient(addr, timeout, user, password, encrypted, keyfile, certfile)
def dial(self):
self.client.dial()
def close(self):
self.client.close()
def stream_start(self, replPos):
try:
self.client.stream_call('UpdateStream.ServeUpdateStream', {"Position": replPos})
response = self.client.stream_next()
if response is None:
return None
return EventData(response.reply).__dict__
except gorpc.GoRpcError as e:
raise dbexceptions.OperationalError(*e.args)
except:
logging.exception('gorpc low-level error')
raise
def stream_next(self):
try:
response = self.client.stream_next()
if response is None:
return None
return EventData(response.reply).__dict__
except gorpc.AppError as e:
raise dbexceptions.DatabaseError(*e.args)
except gorpc.GoRpcError as e:
raise dbexceptions.OperationalError(*e.args)
except:
logging.exception('gorpc low-level error')
raise

0
py/vtproto/__init__.py Normal file
Просмотреть файл

Просмотреть файл

@ -0,0 +1,631 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: automation.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='automation.proto',
package='automation',
syntax='proto3',
serialized_pb=_b('\n\x10\x61utomation.proto\x12\nautomation\"\x90\x01\n\x10\x43lusterOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12/\n\x0cserial_tasks\x18\x02 \x03(\x0b\x32\x19.automation.TaskContainer\x12\x30\n\x05state\x18\x03 \x01(\x0e\x32!.automation.ClusterOperationState\x12\r\n\x05\x65rror\x18\x04 \x01(\t\"N\n\rTaskContainer\x12(\n\x0eparallel_tasks\x18\x01 \x03(\x0b\x32\x10.automation.Task\x12\x13\n\x0b\x63oncurrency\x18\x02 \x01(\x05\"\xce\x01\n\x04Task\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\nparameters\x18\x02 \x03(\x0b\x32 .automation.Task.ParametersEntry\x12\n\n\x02id\x18\x03 \x01(\t\x12$\n\x05state\x18\x04 \x01(\x0e\x32\x15.automation.TaskState\x12\x0e\n\x06output\x18\x05 \x01(\t\x12\r\n\x05\x65rror\x18\x06 \x01(\t\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xb1\x01\n\x1e\x45nqueueClusterOperationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12N\n\nparameters\x18\x02 \x03(\x0b\x32:.automation.EnqueueClusterOperationRequest.ParametersEntry\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"-\n\x1f\x45nqueueClusterOperationResponse\x12\n\n\x02id\x18\x01 \x01(\t\"-\n\x1fGetClusterOperationStateRequest\x12\n\n\x02id\x18\x01 \x01(\t\"T\n GetClusterOperationStateResponse\x12\x30\n\x05state\x18\x01 \x01(\x0e\x32!.automation.ClusterOperationState\"/\n!GetClusterOperationDetailsRequest\x12\n\n\x02id\x18\x01 \x01(\t\"V\n\"GetClusterOperationDetailsResponse\x12\x30\n\ncluster_op\x18\x02 \x01(\x0b\x32\x1c.automation.ClusterOperation*\x9a\x01\n\x15\x43lusterOperationState\x12#\n\x1fUNKNOWN_CLUSTER_OPERATION_STATE\x10\x00\x12!\n\x1d\x43LUSTER_OPERATION_NOT_STARTED\x10\x01\x12\x1d\n\x19\x43LUSTER_OPERATION_RUNNING\x10\x02\x12\x1a\n\x16\x43LUSTER_OPERATION_DONE\x10\x03*K\n\tTaskState\x12\x16\n\x12UNKNOWN_TASK_STATE\x10\x00\x12\x0f\n\x0bNOT_STARTED\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_CLUSTEROPERATIONSTATE = _descriptor.EnumDescriptor(
name='ClusterOperationState',
full_name='automation.ClusterOperationState',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_CLUSTER_OPERATION_STATE', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CLUSTER_OPERATION_NOT_STARTED', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CLUSTER_OPERATION_RUNNING', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CLUSTER_OPERATION_DONE', index=3, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=966,
serialized_end=1120,
)
_sym_db.RegisterEnumDescriptor(_CLUSTEROPERATIONSTATE)
ClusterOperationState = enum_type_wrapper.EnumTypeWrapper(_CLUSTEROPERATIONSTATE)
_TASKSTATE = _descriptor.EnumDescriptor(
name='TaskState',
full_name='automation.TaskState',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_TASK_STATE', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NOT_STARTED', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='RUNNING', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DONE', index=3, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=1122,
serialized_end=1197,
)
_sym_db.RegisterEnumDescriptor(_TASKSTATE)
TaskState = enum_type_wrapper.EnumTypeWrapper(_TASKSTATE)
UNKNOWN_CLUSTER_OPERATION_STATE = 0
CLUSTER_OPERATION_NOT_STARTED = 1
CLUSTER_OPERATION_RUNNING = 2
CLUSTER_OPERATION_DONE = 3
UNKNOWN_TASK_STATE = 0
NOT_STARTED = 1
RUNNING = 2
DONE = 3
_CLUSTEROPERATION = _descriptor.Descriptor(
name='ClusterOperation',
full_name='automation.ClusterOperation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='automation.ClusterOperation.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='serial_tasks', full_name='automation.ClusterOperation.serial_tasks', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='state', full_name='automation.ClusterOperation.state', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='error', full_name='automation.ClusterOperation.error', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=33,
serialized_end=177,
)
_TASKCONTAINER = _descriptor.Descriptor(
name='TaskContainer',
full_name='automation.TaskContainer',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='parallel_tasks', full_name='automation.TaskContainer.parallel_tasks', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='concurrency', full_name='automation.TaskContainer.concurrency', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=179,
serialized_end=257,
)
_TASK_PARAMETERSENTRY = _descriptor.Descriptor(
name='ParametersEntry',
full_name='automation.Task.ParametersEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='automation.Task.ParametersEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='automation.Task.ParametersEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=417,
serialized_end=466,
)
# Message descriptor for automation.Task (protoc-generated; edits here are
# lost on regeneration). Fields: name, parameters (repeated map-entry
# messages, see _TASK_PARAMETERSENTRY in nested_types), id, state (enum,
# wired to _TASKSTATE later in this module), output, error.
_TASK = _descriptor.Descriptor(
  name='Task',
  full_name='automation.Task',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='automation.Task.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='parameters', full_name='automation.Task.parameters', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='id', full_name='automation.Task.id', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='state', full_name='automation.Task.state', index=3,
      number=4, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='output', full_name='automation.Task.output', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='error', full_name='automation.Task.error', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_TASK_PARAMETERSENTRY, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=260,
  serialized_end=466,
)
# Synthetic key/value map-entry descriptor backing the
# EnqueueClusterOperationRequest.parameters map<string, string> field.
# The options blob '8\001' is serialized MessageOptions with the map_entry
# flag set, which is how protoc marks generated map entries.
_ENQUEUECLUSTEROPERATIONREQUEST_PARAMETERSENTRY = _descriptor.Descriptor(
  name='ParametersEntry',
  full_name='automation.EnqueueClusterOperationRequest.ParametersEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='automation.EnqueueClusterOperationRequest.ParametersEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='automation.EnqueueClusterOperationRequest.ParametersEntry.value', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=417,
  serialized_end=466,
)
# Message descriptor for automation.EnqueueClusterOperationRequest:
# the operation name plus a parameters map (nested map-entry above).
_ENQUEUECLUSTEROPERATIONREQUEST = _descriptor.Descriptor(
  name='EnqueueClusterOperationRequest',
  full_name='automation.EnqueueClusterOperationRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='automation.EnqueueClusterOperationRequest.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='parameters', full_name='automation.EnqueueClusterOperationRequest.parameters', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_ENQUEUECLUSTEROPERATIONREQUEST_PARAMETERSENTRY, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=469,
  serialized_end=646,
)
# Message descriptor for automation.EnqueueClusterOperationResponse:
# carries only the id assigned to the enqueued cluster operation.
_ENQUEUECLUSTEROPERATIONRESPONSE = _descriptor.Descriptor(
  name='EnqueueClusterOperationResponse',
  full_name='automation.EnqueueClusterOperationResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='automation.EnqueueClusterOperationResponse.id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=648,
  serialized_end=693,
)
# Message descriptor for automation.GetClusterOperationStateRequest:
# looks up an operation's state by its id.
_GETCLUSTEROPERATIONSTATEREQUEST = _descriptor.Descriptor(
  name='GetClusterOperationStateRequest',
  full_name='automation.GetClusterOperationStateRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='automation.GetClusterOperationStateRequest.id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=695,
  serialized_end=740,
)
# Message descriptor for automation.GetClusterOperationStateResponse:
# a single enum-typed 'state' field (wired to _CLUSTEROPERATIONSTATE below).
_GETCLUSTEROPERATIONSTATERESPONSE = _descriptor.Descriptor(
  name='GetClusterOperationStateResponse',
  full_name='automation.GetClusterOperationStateResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='state', full_name='automation.GetClusterOperationStateResponse.state', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=742,
  serialized_end=826,
)
# Message descriptor for automation.GetClusterOperationDetailsRequest:
# looks up full operation details by id.
_GETCLUSTEROPERATIONDETAILSREQUEST = _descriptor.Descriptor(
  name='GetClusterOperationDetailsRequest',
  full_name='automation.GetClusterOperationDetailsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='automation.GetClusterOperationDetailsRequest.id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=828,
  serialized_end=875,
)
# Message descriptor for automation.GetClusterOperationDetailsResponse:
# embeds the full ClusterOperation message (note field number 2, not 1).
_GETCLUSTEROPERATIONDETAILSRESPONSE = _descriptor.Descriptor(
  name='GetClusterOperationDetailsResponse',
  full_name='automation.GetClusterOperationDetailsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='cluster_op', full_name='automation.GetClusterOperationDetailsResponse.cluster_op', index=0,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=877,
  serialized_end=963,
)
# Resolve cross-references that could not be supplied while the descriptors
# above were being constructed: message/enum field types, map-entry
# containing types, and registration of every top-level message and enum on
# the file-level DESCRIPTOR.
_CLUSTEROPERATION.fields_by_name['serial_tasks'].message_type = _TASKCONTAINER
_CLUSTEROPERATION.fields_by_name['state'].enum_type = _CLUSTEROPERATIONSTATE
_TASKCONTAINER.fields_by_name['parallel_tasks'].message_type = _TASK
_TASK_PARAMETERSENTRY.containing_type = _TASK
_TASK.fields_by_name['parameters'].message_type = _TASK_PARAMETERSENTRY
_TASK.fields_by_name['state'].enum_type = _TASKSTATE
_ENQUEUECLUSTEROPERATIONREQUEST_PARAMETERSENTRY.containing_type = _ENQUEUECLUSTEROPERATIONREQUEST
_ENQUEUECLUSTEROPERATIONREQUEST.fields_by_name['parameters'].message_type = _ENQUEUECLUSTEROPERATIONREQUEST_PARAMETERSENTRY
_GETCLUSTEROPERATIONSTATERESPONSE.fields_by_name['state'].enum_type = _CLUSTEROPERATIONSTATE
_GETCLUSTEROPERATIONDETAILSRESPONSE.fields_by_name['cluster_op'].message_type = _CLUSTEROPERATION
DESCRIPTOR.message_types_by_name['ClusterOperation'] = _CLUSTEROPERATION
DESCRIPTOR.message_types_by_name['TaskContainer'] = _TASKCONTAINER
DESCRIPTOR.message_types_by_name['Task'] = _TASK
DESCRIPTOR.message_types_by_name['EnqueueClusterOperationRequest'] = _ENQUEUECLUSTEROPERATIONREQUEST
DESCRIPTOR.message_types_by_name['EnqueueClusterOperationResponse'] = _ENQUEUECLUSTEROPERATIONRESPONSE
DESCRIPTOR.message_types_by_name['GetClusterOperationStateRequest'] = _GETCLUSTEROPERATIONSTATEREQUEST
DESCRIPTOR.message_types_by_name['GetClusterOperationStateResponse'] = _GETCLUSTEROPERATIONSTATERESPONSE
DESCRIPTOR.message_types_by_name['GetClusterOperationDetailsRequest'] = _GETCLUSTEROPERATIONDETAILSREQUEST
DESCRIPTOR.message_types_by_name['GetClusterOperationDetailsResponse'] = _GETCLUSTEROPERATIONDETAILSRESPONSE
DESCRIPTOR.enum_types_by_name['ClusterOperationState'] = _CLUSTEROPERATIONSTATE
DESCRIPTOR.enum_types_by_name['TaskState'] = _TASKSTATE
# Concrete Python message classes, built from the descriptors above via the
# reflection metaclass and registered in the default symbol database so they
# can be looked up by full name. Map-entry helpers (ParametersEntry) are
# nested as class attributes of their containing message.
ClusterOperation = _reflection.GeneratedProtocolMessageType('ClusterOperation', (_message.Message,), dict(
  DESCRIPTOR = _CLUSTEROPERATION,
  __module__ = 'automation_pb2'
  # @@protoc_insertion_point(class_scope:automation.ClusterOperation)
  ))
_sym_db.RegisterMessage(ClusterOperation)
TaskContainer = _reflection.GeneratedProtocolMessageType('TaskContainer', (_message.Message,), dict(
  DESCRIPTOR = _TASKCONTAINER,
  __module__ = 'automation_pb2'
  # @@protoc_insertion_point(class_scope:automation.TaskContainer)
  ))
_sym_db.RegisterMessage(TaskContainer)
Task = _reflection.GeneratedProtocolMessageType('Task', (_message.Message,), dict(
  ParametersEntry = _reflection.GeneratedProtocolMessageType('ParametersEntry', (_message.Message,), dict(
    DESCRIPTOR = _TASK_PARAMETERSENTRY,
    __module__ = 'automation_pb2'
    # @@protoc_insertion_point(class_scope:automation.Task.ParametersEntry)
    ))
  ,
  DESCRIPTOR = _TASK,
  __module__ = 'automation_pb2'
  # @@protoc_insertion_point(class_scope:automation.Task)
  ))
_sym_db.RegisterMessage(Task)
_sym_db.RegisterMessage(Task.ParametersEntry)
EnqueueClusterOperationRequest = _reflection.GeneratedProtocolMessageType('EnqueueClusterOperationRequest', (_message.Message,), dict(
  ParametersEntry = _reflection.GeneratedProtocolMessageType('ParametersEntry', (_message.Message,), dict(
    DESCRIPTOR = _ENQUEUECLUSTEROPERATIONREQUEST_PARAMETERSENTRY,
    __module__ = 'automation_pb2'
    # @@protoc_insertion_point(class_scope:automation.EnqueueClusterOperationRequest.ParametersEntry)
    ))
  ,
  DESCRIPTOR = _ENQUEUECLUSTEROPERATIONREQUEST,
  __module__ = 'automation_pb2'
  # @@protoc_insertion_point(class_scope:automation.EnqueueClusterOperationRequest)
  ))
_sym_db.RegisterMessage(EnqueueClusterOperationRequest)
_sym_db.RegisterMessage(EnqueueClusterOperationRequest.ParametersEntry)
EnqueueClusterOperationResponse = _reflection.GeneratedProtocolMessageType('EnqueueClusterOperationResponse', (_message.Message,), dict(
  DESCRIPTOR = _ENQUEUECLUSTEROPERATIONRESPONSE,
  __module__ = 'automation_pb2'
  # @@protoc_insertion_point(class_scope:automation.EnqueueClusterOperationResponse)
  ))
_sym_db.RegisterMessage(EnqueueClusterOperationResponse)
GetClusterOperationStateRequest = _reflection.GeneratedProtocolMessageType('GetClusterOperationStateRequest', (_message.Message,), dict(
  DESCRIPTOR = _GETCLUSTEROPERATIONSTATEREQUEST,
  __module__ = 'automation_pb2'
  # @@protoc_insertion_point(class_scope:automation.GetClusterOperationStateRequest)
  ))
_sym_db.RegisterMessage(GetClusterOperationStateRequest)
GetClusterOperationStateResponse = _reflection.GeneratedProtocolMessageType('GetClusterOperationStateResponse', (_message.Message,), dict(
  DESCRIPTOR = _GETCLUSTEROPERATIONSTATERESPONSE,
  __module__ = 'automation_pb2'
  # @@protoc_insertion_point(class_scope:automation.GetClusterOperationStateResponse)
  ))
_sym_db.RegisterMessage(GetClusterOperationStateResponse)
GetClusterOperationDetailsRequest = _reflection.GeneratedProtocolMessageType('GetClusterOperationDetailsRequest', (_message.Message,), dict(
  DESCRIPTOR = _GETCLUSTEROPERATIONDETAILSREQUEST,
  __module__ = 'automation_pb2'
  # @@protoc_insertion_point(class_scope:automation.GetClusterOperationDetailsRequest)
  ))
_sym_db.RegisterMessage(GetClusterOperationDetailsRequest)
GetClusterOperationDetailsResponse = _reflection.GeneratedProtocolMessageType('GetClusterOperationDetailsResponse', (_message.Message,), dict(
  DESCRIPTOR = _GETCLUSTEROPERATIONDETAILSRESPONSE,
  __module__ = 'automation_pb2'
  # @@protoc_insertion_point(class_scope:automation.GetClusterOperationDetailsResponse)
  ))
_sym_db.RegisterMessage(GetClusterOperationDetailsResponse)
# Flag the two map-entry messages with their serialized MessageOptions
# ('8\001' encodes the map_entry=True option protoc uses for proto3 maps).
_TASK_PARAMETERSENTRY.has_options = True
_TASK_PARAMETERSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_ENQUEUECLUSTEROPERATIONREQUEST_PARAMETERSENTRY.has_options = True
_ENQUEUECLUSTEROPERATIONREQUEST_PARAMETERSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
import abc
from grpc.early_adopter import implementations
from grpc.framework.alpha import utilities
# @@protoc_insertion_point(module_scope)

Просмотреть файл

@ -0,0 +1,98 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: automationservice.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import automation_pb2 as automation__pb2
# File-level descriptor for automationservice.proto: declares the Automation
# service (two unary-unary RPCs) and depends on automation.proto for the
# request/response message types.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='automationservice.proto',
  package='automationservice',
  syntax='proto3',
  serialized_pb=_b('\n\x17\x61utomationservice.proto\x12\x11\x61utomationservice\x1a\x10\x61utomation.proto2\x81\x02\n\nAutomation\x12t\n\x17\x45nqueueClusterOperation\x12*.automation.EnqueueClusterOperationRequest\x1a+.automation.EnqueueClusterOperationResponse\"\x00\x12}\n\x1aGetClusterOperationDetails\x12-.automation.GetClusterOperationDetailsRequest\x1a..automation.GetClusterOperationDetailsResponse\"\x00\x62\x06proto3')
  ,
  dependencies=[automation__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
import abc
from grpc.early_adopter import implementations
from grpc.framework.alpha import utilities
class EarlyAdopterAutomationServicer(object):
  """Server-side interface for the automationservice.Automation service.

  Generated by the gRPC "early adopter" protoc plugin; subclass and
  implement both abstract RPC handler methods, then pass the instance to
  early_adopter_create_Automation_server().
  """
  __metaclass__ = abc.ABCMeta
  @abc.abstractmethod
  def EnqueueClusterOperation(self, request, context):
    # request: automation.EnqueueClusterOperationRequest;
    # must return an automation.EnqueueClusterOperationResponse.
    raise NotImplementedError()
  @abc.abstractmethod
  def GetClusterOperationDetails(self, request, context):
    # request: automation.GetClusterOperationDetailsRequest;
    # must return an automation.GetClusterOperationDetailsResponse.
    raise NotImplementedError()
class EarlyAdopterAutomationServer(object):
  """Abstract lifecycle interface of a running Automation gRPC server.

  The object returned by early_adopter_create_Automation_server() conforms
  to this interface.
  """
  __metaclass__ = abc.ABCMeta
  @abc.abstractmethod
  def start(self):
    # Begins serving RPCs.
    raise NotImplementedError()
  @abc.abstractmethod
  def stop(self):
    # Stops serving RPCs.
    raise NotImplementedError()
class EarlyAdopterAutomationStub(object):
  """Client-side stub interface for automationservice.Automation.

  Each RPC method carries an ``async`` attribute (default None) holding the
  asynchronous variant of the call, per the early-adopter gRPC API.
  """
  __metaclass__ = abc.ABCMeta
  @abc.abstractmethod
  def EnqueueClusterOperation(self, request):
    raise NotImplementedError()
  # 'async' became a reserved keyword in Python 3.7, so the original
  # generated form 'EnqueueClusterOperation.async = None' is a SyntaxError
  # there; setattr() assigns the identical attribute without the keyword.
  setattr(EnqueueClusterOperation, 'async', None)
  @abc.abstractmethod
  def GetClusterOperationDetails(self, request):
    raise NotImplementedError()
  setattr(GetClusterOperationDetails, 'async', None)
def early_adopter_create_Automation_server(servicer, port, private_key=None, certificate_chain=None):
  """Creates a gRPC server exposing the automationservice.Automation service.

  Args:
    servicer: an object implementing EnqueueClusterOperation and
      GetClusterOperationDetails (see EarlyAdopterAutomationServicer).
    port: TCP port to listen on.
    private_key: optional PEM private key; supply with certificate_chain
      to serve over TLS.
    certificate_chain: optional PEM certificate chain.

  Returns:
    An early-adopter implementations server (see EarlyAdopterAutomationServer).
  """
  # Deferred import keeps module import cheap; the generator emitted this
  # line four times, deduplicated here (repeated imports are no-ops anyway).
  import automation_pb2
  method_service_descriptions = {
    "EnqueueClusterOperation": utilities.unary_unary_service_description(
      servicer.EnqueueClusterOperation,
      automation_pb2.EnqueueClusterOperationRequest.FromString,
      automation_pb2.EnqueueClusterOperationResponse.SerializeToString,
    ),
    "GetClusterOperationDetails": utilities.unary_unary_service_description(
      servicer.GetClusterOperationDetails,
      automation_pb2.GetClusterOperationDetailsRequest.FromString,
      automation_pb2.GetClusterOperationDetailsResponse.SerializeToString,
    ),
  }
  return implementations.server("automationservice.Automation", method_service_descriptions, port, private_key=private_key, certificate_chain=certificate_chain)
def early_adopter_create_Automation_stub(host, port, metadata_transformer=None, secure=False, root_certificates=None, private_key=None, certificate_chain=None, server_host_override=None):
  """Creates a client stub for the automationservice.Automation service.

  Args:
    host: server hostname.
    port: server TCP port.
    metadata_transformer: optional callable applied to outgoing metadata.
    secure: if True, connect over TLS using the certificate arguments.
    root_certificates, private_key, certificate_chain: optional PEM data.
    server_host_override: optional hostname override for TLS verification.

  Returns:
    An early-adopter implementations stub wired with both unary-unary RPCs.
  """
  # Deferred import; the generator emitted this line four times,
  # deduplicated here (repeated imports are no-ops anyway).
  import automation_pb2
  method_invocation_descriptions = {
    "EnqueueClusterOperation": utilities.unary_unary_invocation_description(
      automation_pb2.EnqueueClusterOperationRequest.SerializeToString,
      automation_pb2.EnqueueClusterOperationResponse.FromString,
    ),
    "GetClusterOperationDetails": utilities.unary_unary_invocation_description(
      automation_pb2.GetClusterOperationDetailsRequest.SerializeToString,
      automation_pb2.GetClusterOperationDetailsResponse.FromString,
    ),
  }
  return implementations.stub("automationservice.Automation", method_invocation_descriptions, host, port, metadata_transformer=metadata_transformer, secure=secure, root_certificates=root_certificates, private_key=private_key, certificate_chain=certificate_chain, server_host_override=server_host_override)
# @@protoc_insertion_point(module_scope)

Просмотреть файл

@ -0,0 +1,638 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: binlogdata.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import query_pb2 as query__pb2
import topodata_pb2 as topodata__pb2
import replicationdata_pb2 as replicationdata__pb2
# File-level descriptor for binlogdata.proto (depends on query.proto,
# topodata.proto and replicationdata.proto).
# NOTE(review): in the checked-in copy the serialized_pb literal was broken
# across a raw newline (a SyntaxError); it is re-joined here using implicit
# adjacent-string-literal concatenation, preserving the original bytes.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='binlogdata.proto',
  package='binlogdata',
  syntax='proto3',
  serialized_pb=_b('\n\x10\x62inlogdata.proto\x12\nbinlogdata\x1a\x0bquery.proto\x1a\x0etopodata.proto\x1a\x15replicationdata.proto\"7\n\x07\x43harset\x12\x0e\n\x06\x63lient\x18\x01 \x01(\x05\x12\x0c\n\x04\x63onn\x18\x02 \x01(\x05\x12\x0e\n\x06server\x18\x03 \x01(\x05\"\xe9\x02\n\x11\x42inlogTransaction\x12;\n\nstatements\x18\x01 \x03(\x0b\x32\'.binlogdata.BinlogTransaction.Statement\x12\x11\n\ttimestamp\x18\x02 \x01(\x03\x12\x0c\n\x04gtid\x18\x03 \x01(\t\x1a\xf5\x01\n\tStatement\x12\x42\n\x08\x63\x61tegory\x18\x01 \x01(\x0e\x32\x30.binlogdata.BinlogTransaction.Statement.Category\x12$\n\x07\x63harset\x18\x03 \x01(\x0b\x32\x13.binlogdata.Charset\x12\x0b\n\x03sql\x18\x02 \x01(\x0c\"q\n\x08\x43\x61tegory\x12\x13\n\x0f\x42L_UNRECOGNIZED\x10\x00\x12\x0c\n\x08\x42L_BEGIN\x10\x01\x12\r\n\tBL_COMMIT\x10\x02\x12\x0f\n\x0b\x42L_ROLLBACK\x10\x03\x12\n\n\x06\x42L_DML\x10\x04\x12\n\n\x06\x42L_DDL\x10\x05\x12\n\n\x06\x42L_SET\x10\x06\"\x91\x02\n\x0bStreamEvent\x12\x32\n\x08\x63\x61tegory\x18\x01 \x01(\x0e\x32 .binlogdata.StreamEvent.Category\x12\x12\n\ntable_name\x18\x02 \x01(\t\x12(\n\x12primary_key_fields\x18\x03 \x03(\x0b\x32\x0c.query.Field\x12&\n\x12primary_key_values\x18\x04 \x03(\x0b\x32\n.query.Row\x12\x0b\n\x03sql\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\x03\x12\x0c\n\x04gtid\x18\x07 \x01(\t\":\n\x08\x43\x61tegory\x12\n\n\x06SE_ERR\x10\x00\x12\n\n\x06SE_DML\x10\x01\x12\n\n\x06SE_DDL\x10\x02\x12\n\n\x06SE_POS\x10\x03\"B\n\x13StreamUpdateRequest\x12+\n\x08position\x18\x01 \x01(\x0b\x32\x19.replicationdata.Position\"E\n\x14StreamUpdateResponse\x12-\n\x0cstream_event\x18\x01 \x01(\x0b\x32\x17.binlogdata.StreamEvent\"\xc5\x01\n\x15StreamKeyRangeRequest\x12+\n\x08position\x18\x01 \x01(\x0b\x32\x19.replicationdata.Position\x12\x32\n\x10keyspace_id_type\x18\x02 \x01(\x0e\x32\x18.topodata.KeyspaceIdType\x12%\n\tkey_range\x18\x03 \x01(\x0b\x32\x12.topodata.KeyRange\x12$\n\x07\x63harset\x18\x04 '
    '\x01(\x0b\x32\x13.binlogdata.Charset\"S\n\x16StreamKeyRangeResponse\x12\x39\n\x12\x62inlog_transaction\x18\x01 \x01(\x0b\x32\x1d.binlogdata.BinlogTransaction\"x\n\x13StreamTablesRequest\x12+\n\x08position\x18\x01 \x01(\x0b\x32\x19.replicationdata.Position\x12\x0e\n\x06tables\x18\x02 \x03(\t\x12$\n\x07\x63harset\x18\x03 \x01(\x0b\x32\x13.binlogdata.Charset\"Q\n\x14StreamTablesResponse\x12\x39\n\x12\x62inlog_transaction\x18\x01 \x01(\x0b\x32\x1d.binlogdata.BinlogTransactionb\x06proto3')
  ,
  dependencies=[query__pb2.DESCRIPTOR,topodata__pb2.DESCRIPTOR,replicationdata__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Enum descriptor for binlogdata.BinlogTransaction.Statement.Category:
# classifies each replicated SQL statement (begin/commit/rollback/DML/DDL/
# SET or unrecognized).
_BINLOGTRANSACTION_STATEMENT_CATEGORY = _descriptor.EnumDescriptor(
  name='Category',
  full_name='binlogdata.BinlogTransaction.Statement.Category',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='BL_UNRECOGNIZED', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='BL_BEGIN', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='BL_COMMIT', index=2, number=2,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='BL_ROLLBACK', index=3, number=3,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='BL_DML', index=4, number=4,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='BL_DDL', index=5, number=5,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='BL_SET', index=6, number=6,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=390,
  serialized_end=503,
)
_sym_db.RegisterEnumDescriptor(_BINLOGTRANSACTION_STATEMENT_CATEGORY)
# Enum descriptor for binlogdata.StreamEvent.Category:
# error / DML / DDL / position marker event kinds.
_STREAMEVENT_CATEGORY = _descriptor.EnumDescriptor(
  name='Category',
  full_name='binlogdata.StreamEvent.Category',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='SE_ERR', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='SE_DML', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='SE_DDL', index=2, number=2,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='SE_POS', index=3, number=3,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=721,
  serialized_end=779,
)
_sym_db.RegisterEnumDescriptor(_STREAMEVENT_CATEGORY)
# Message descriptor for binlogdata.Charset: three int32 charset ids
# (client, conn, server).
_CHARSET = _descriptor.Descriptor(
  name='Charset',
  full_name='binlogdata.Charset',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='client', full_name='binlogdata.Charset.client', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='conn', full_name='binlogdata.Charset.conn', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='server', full_name='binlogdata.Charset.server', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=84,
  serialized_end=139,
)
# Nested message descriptor for binlogdata.BinlogTransaction.Statement:
# a category enum, optional charset, and the raw SQL bytes. Note the field
# declaration order (category=1, charset=3, sql=2) does not follow field
# numbers; 'index' tracks declaration order, 'number' the wire tag.
_BINLOGTRANSACTION_STATEMENT = _descriptor.Descriptor(
  name='Statement',
  full_name='binlogdata.BinlogTransaction.Statement',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='category', full_name='binlogdata.BinlogTransaction.Statement.category', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='charset', full_name='binlogdata.BinlogTransaction.Statement.charset', index=1,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='sql', full_name='binlogdata.BinlogTransaction.Statement.sql', index=2,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _BINLOGTRANSACTION_STATEMENT_CATEGORY,
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=258,
  serialized_end=503,
)
# Message descriptor for binlogdata.BinlogTransaction: a repeated list of
# Statement messages plus the transaction timestamp and GTID string.
_BINLOGTRANSACTION = _descriptor.Descriptor(
  name='BinlogTransaction',
  full_name='binlogdata.BinlogTransaction',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='statements', full_name='binlogdata.BinlogTransaction.statements', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='timestamp', full_name='binlogdata.BinlogTransaction.timestamp', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='gtid', full_name='binlogdata.BinlogTransaction.gtid', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_BINLOGTRANSACTION_STATEMENT, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=142,
  serialized_end=503,
)
# Message descriptor for binlogdata.StreamEvent: one update-stream event
# with its category, affected table, primary-key fields/values (types from
# query.proto, wired below), raw SQL, timestamp and GTID.
_STREAMEVENT = _descriptor.Descriptor(
  name='StreamEvent',
  full_name='binlogdata.StreamEvent',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='category', full_name='binlogdata.StreamEvent.category', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='table_name', full_name='binlogdata.StreamEvent.table_name', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='primary_key_fields', full_name='binlogdata.StreamEvent.primary_key_fields', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='primary_key_values', full_name='binlogdata.StreamEvent.primary_key_values', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='sql', full_name='binlogdata.StreamEvent.sql', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='timestamp', full_name='binlogdata.StreamEvent.timestamp', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='gtid', full_name='binlogdata.StreamEvent.gtid', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _STREAMEVENT_CATEGORY,
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=506,
  serialized_end=779,
)
# Message descriptor for binlogdata.StreamUpdateRequest: the replication
# position (replicationdata.Position) to start streaming from.
_STREAMUPDATEREQUEST = _descriptor.Descriptor(
  name='StreamUpdateRequest',
  full_name='binlogdata.StreamUpdateRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='position', full_name='binlogdata.StreamUpdateRequest.position', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=781,
  serialized_end=847,
)
# Message descriptor for binlogdata.StreamUpdateResponse: wraps a single
# StreamEvent per streamed response.
_STREAMUPDATERESPONSE = _descriptor.Descriptor(
  name='StreamUpdateResponse',
  full_name='binlogdata.StreamUpdateResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='stream_event', full_name='binlogdata.StreamUpdateResponse.stream_event', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=849,
  serialized_end=918,
)
# Message descriptor for binlogdata.StreamKeyRangeRequest: starting
# position, keyspace-id type and key range (types from topodata.proto),
# plus an optional charset.
_STREAMKEYRANGEREQUEST = _descriptor.Descriptor(
  name='StreamKeyRangeRequest',
  full_name='binlogdata.StreamKeyRangeRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='position', full_name='binlogdata.StreamKeyRangeRequest.position', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='keyspace_id_type', full_name='binlogdata.StreamKeyRangeRequest.keyspace_id_type', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='key_range', full_name='binlogdata.StreamKeyRangeRequest.key_range', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='charset', full_name='binlogdata.StreamKeyRangeRequest.charset', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=921,
  serialized_end=1118,
)
# Message descriptor for binlogdata.StreamKeyRangeResponse: wraps one
# BinlogTransaction per streamed response.
_STREAMKEYRANGERESPONSE = _descriptor.Descriptor(
  name='StreamKeyRangeResponse',
  full_name='binlogdata.StreamKeyRangeResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='binlog_transaction', full_name='binlogdata.StreamKeyRangeResponse.binlog_transaction', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1120,
  serialized_end=1203,
)
# Message descriptor for binlogdata.StreamTablesRequest: starting position,
# the list of table names to stream, and an optional charset.
_STREAMTABLESREQUEST = _descriptor.Descriptor(
  name='StreamTablesRequest',
  full_name='binlogdata.StreamTablesRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='position', full_name='binlogdata.StreamTablesRequest.position', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='tables', full_name='binlogdata.StreamTablesRequest.tables', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='charset', full_name='binlogdata.StreamTablesRequest.charset', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1205,
  serialized_end=1325,
)
_STREAMTABLESRESPONSE = _descriptor.Descriptor(
name='StreamTablesResponse',
full_name='binlogdata.StreamTablesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='binlog_transaction', full_name='binlogdata.StreamTablesResponse.binlog_transaction', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1327,
serialized_end=1408,
)
_BINLOGTRANSACTION_STATEMENT.fields_by_name['category'].enum_type = _BINLOGTRANSACTION_STATEMENT_CATEGORY
_BINLOGTRANSACTION_STATEMENT.fields_by_name['charset'].message_type = _CHARSET
_BINLOGTRANSACTION_STATEMENT.containing_type = _BINLOGTRANSACTION
_BINLOGTRANSACTION_STATEMENT_CATEGORY.containing_type = _BINLOGTRANSACTION_STATEMENT
_BINLOGTRANSACTION.fields_by_name['statements'].message_type = _BINLOGTRANSACTION_STATEMENT
_STREAMEVENT.fields_by_name['category'].enum_type = _STREAMEVENT_CATEGORY
_STREAMEVENT.fields_by_name['primary_key_fields'].message_type = query__pb2._FIELD
_STREAMEVENT.fields_by_name['primary_key_values'].message_type = query__pb2._ROW
_STREAMEVENT_CATEGORY.containing_type = _STREAMEVENT
_STREAMUPDATEREQUEST.fields_by_name['position'].message_type = replicationdata__pb2._POSITION
_STREAMUPDATERESPONSE.fields_by_name['stream_event'].message_type = _STREAMEVENT
_STREAMKEYRANGEREQUEST.fields_by_name['position'].message_type = replicationdata__pb2._POSITION
_STREAMKEYRANGEREQUEST.fields_by_name['keyspace_id_type'].enum_type = topodata__pb2._KEYSPACEIDTYPE
_STREAMKEYRANGEREQUEST.fields_by_name['key_range'].message_type = topodata__pb2._KEYRANGE
_STREAMKEYRANGEREQUEST.fields_by_name['charset'].message_type = _CHARSET
_STREAMKEYRANGERESPONSE.fields_by_name['binlog_transaction'].message_type = _BINLOGTRANSACTION
_STREAMTABLESREQUEST.fields_by_name['position'].message_type = replicationdata__pb2._POSITION
_STREAMTABLESREQUEST.fields_by_name['charset'].message_type = _CHARSET
_STREAMTABLESRESPONSE.fields_by_name['binlog_transaction'].message_type = _BINLOGTRANSACTION
DESCRIPTOR.message_types_by_name['Charset'] = _CHARSET
DESCRIPTOR.message_types_by_name['BinlogTransaction'] = _BINLOGTRANSACTION
DESCRIPTOR.message_types_by_name['StreamEvent'] = _STREAMEVENT
DESCRIPTOR.message_types_by_name['StreamUpdateRequest'] = _STREAMUPDATEREQUEST
DESCRIPTOR.message_types_by_name['StreamUpdateResponse'] = _STREAMUPDATERESPONSE
DESCRIPTOR.message_types_by_name['StreamKeyRangeRequest'] = _STREAMKEYRANGEREQUEST
DESCRIPTOR.message_types_by_name['StreamKeyRangeResponse'] = _STREAMKEYRANGERESPONSE
DESCRIPTOR.message_types_by_name['StreamTablesRequest'] = _STREAMTABLESREQUEST
DESCRIPTOR.message_types_by_name['StreamTablesResponse'] = _STREAMTABLESRESPONSE
# ---------------------------------------------------------------------------
# Concrete message classes, built at import time from the descriptors above
# via the protobuf reflection machinery and registered with the default
# symbol database.  BinlogTransaction.Statement is a nested message type.
# ---------------------------------------------------------------------------
Charset = _reflection.GeneratedProtocolMessageType('Charset', (_message.Message,), dict(
  DESCRIPTOR = _CHARSET,
  __module__ = 'binlogdata_pb2'
  # @@protoc_insertion_point(class_scope:binlogdata.Charset)
  ))
_sym_db.RegisterMessage(Charset)
BinlogTransaction = _reflection.GeneratedProtocolMessageType('BinlogTransaction', (_message.Message,), dict(
  Statement = _reflection.GeneratedProtocolMessageType('Statement', (_message.Message,), dict(
    DESCRIPTOR = _BINLOGTRANSACTION_STATEMENT,
    __module__ = 'binlogdata_pb2'
    # @@protoc_insertion_point(class_scope:binlogdata.BinlogTransaction.Statement)
    ))
  ,
  DESCRIPTOR = _BINLOGTRANSACTION,
  __module__ = 'binlogdata_pb2'
  # @@protoc_insertion_point(class_scope:binlogdata.BinlogTransaction)
  ))
_sym_db.RegisterMessage(BinlogTransaction)
_sym_db.RegisterMessage(BinlogTransaction.Statement)
StreamEvent = _reflection.GeneratedProtocolMessageType('StreamEvent', (_message.Message,), dict(
  DESCRIPTOR = _STREAMEVENT,
  __module__ = 'binlogdata_pb2'
  # @@protoc_insertion_point(class_scope:binlogdata.StreamEvent)
  ))
_sym_db.RegisterMessage(StreamEvent)
StreamUpdateRequest = _reflection.GeneratedProtocolMessageType('StreamUpdateRequest', (_message.Message,), dict(
  DESCRIPTOR = _STREAMUPDATEREQUEST,
  __module__ = 'binlogdata_pb2'
  # @@protoc_insertion_point(class_scope:binlogdata.StreamUpdateRequest)
  ))
_sym_db.RegisterMessage(StreamUpdateRequest)
StreamUpdateResponse = _reflection.GeneratedProtocolMessageType('StreamUpdateResponse', (_message.Message,), dict(
  DESCRIPTOR = _STREAMUPDATERESPONSE,
  __module__ = 'binlogdata_pb2'
  # @@protoc_insertion_point(class_scope:binlogdata.StreamUpdateResponse)
  ))
_sym_db.RegisterMessage(StreamUpdateResponse)
StreamKeyRangeRequest = _reflection.GeneratedProtocolMessageType('StreamKeyRangeRequest', (_message.Message,), dict(
  DESCRIPTOR = _STREAMKEYRANGEREQUEST,
  __module__ = 'binlogdata_pb2'
  # @@protoc_insertion_point(class_scope:binlogdata.StreamKeyRangeRequest)
  ))
_sym_db.RegisterMessage(StreamKeyRangeRequest)
StreamKeyRangeResponse = _reflection.GeneratedProtocolMessageType('StreamKeyRangeResponse', (_message.Message,), dict(
  DESCRIPTOR = _STREAMKEYRANGERESPONSE,
  __module__ = 'binlogdata_pb2'
  # @@protoc_insertion_point(class_scope:binlogdata.StreamKeyRangeResponse)
  ))
_sym_db.RegisterMessage(StreamKeyRangeResponse)
StreamTablesRequest = _reflection.GeneratedProtocolMessageType('StreamTablesRequest', (_message.Message,), dict(
  DESCRIPTOR = _STREAMTABLESREQUEST,
  __module__ = 'binlogdata_pb2'
  # @@protoc_insertion_point(class_scope:binlogdata.StreamTablesRequest)
  ))
_sym_db.RegisterMessage(StreamTablesRequest)
StreamTablesResponse = _reflection.GeneratedProtocolMessageType('StreamTablesResponse', (_message.Message,), dict(
  DESCRIPTOR = _STREAMTABLESRESPONSE,
  __module__ = 'binlogdata_pb2'
  # @@protoc_insertion_point(class_scope:binlogdata.StreamTablesResponse)
  ))
_sym_db.RegisterMessage(StreamTablesResponse)
import abc
from grpc.early_adopter import implementations
from grpc.framework.alpha import utilities
# @@protoc_insertion_point(module_scope)

Просмотреть файл

@ -0,0 +1,118 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: binlogservice.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import binlogdata_pb2 as binlogdata__pb2
# File descriptor for binlogservice.proto.  The service references only
# messages defined in binlogdata.proto, hence the single dependency below.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='binlogservice.proto',
  package='binlogservice',
  syntax='proto3',
  serialized_pb=_b('\n\x13\x62inlogservice.proto\x12\rbinlogservice\x1a\x10\x62inlogdata.proto2\x99\x02\n\x0cUpdateStream\x12U\n\x0cStreamUpdate\x12\x1f.binlogdata.StreamUpdateRequest\x1a .binlogdata.StreamUpdateResponse\"\x00\x30\x01\x12[\n\x0eStreamKeyRange\x12!.binlogdata.StreamKeyRangeRequest\x1a\".binlogdata.StreamKeyRangeResponse\"\x00\x30\x01\x12U\n\x0cStreamTables\x12\x1f.binlogdata.StreamTablesRequest\x1a .binlogdata.StreamTablesResponse\"\x00\x30\x01\x62\x06proto3')
  ,
  dependencies=[binlogdata__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
import abc
from grpc.early_adopter import implementations
from grpc.framework.alpha import utilities
class EarlyAdopterUpdateStreamServicer(object):
  """Abstract service-side interface for binlogservice.UpdateStream.

  Implementations must override the three RPC methods below; each receives
  the deserialized request message and an RPC context.  Generated by the
  gRPC "early adopter" code generator.
  """
  __metaclass__ = abc.ABCMeta  # Python 2 metaclass spelling; ignored by Python 3.
  @abc.abstractmethod
  def StreamUpdate(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def StreamKeyRange(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def StreamTables(self, request, context):
    raise NotImplementedError()
class EarlyAdopterUpdateStreamServer(object):
  """Abstract lifecycle handle for a running UpdateStream gRPC server."""
  __metaclass__ = abc.ABCMeta  # Python 2 metaclass spelling; ignored by Python 3.
  @abc.abstractmethod
  def start(self):
    raise NotImplementedError()
  @abc.abstractmethod
  def stop(self):
    raise NotImplementedError()
class EarlyAdopterUpdateStreamStub(object):
  """Abstract client-side stub interface for binlogservice.UpdateStream."""
  __metaclass__ = abc.ABCMeta  # Python 2 metaclass spelling; ignored by Python 3.
  # NOTE(review): the generated `<method>.async = None` attribute assignments
  # below are Python-2-only -- `async` became a reserved keyword in Python 3.7,
  # making this file a SyntaxError there.  Fix belongs in the generator.
  @abc.abstractmethod
  def StreamUpdate(self, request):
    raise NotImplementedError()
  StreamUpdate.async = None
  @abc.abstractmethod
  def StreamKeyRange(self, request):
    raise NotImplementedError()
  StreamKeyRange.async = None
  @abc.abstractmethod
  def StreamTables(self, request):
    raise NotImplementedError()
  StreamTables.async = None
def early_adopter_create_UpdateStream_server(servicer, port, private_key=None, certificate_chain=None):
  """Create a gRPC server for the binlogservice.UpdateStream service.

  Args:
    servicer: object implementing the EarlyAdopterUpdateStreamServicer methods.
    port: port for the server to listen on.
    private_key: PEM private key to serve TLS, or None for an insecure server.
    certificate_chain: PEM certificate chain to serve TLS, or None.

  Returns:
    An early-adopter server object wired to the three response-streaming
    methods, created via implementations.server().
  """
  # The code generator emitted one duplicate `import binlogdata_pb2` per
  # message reference; a single import is sufficient.
  import binlogdata_pb2
  method_service_descriptions = {
      "StreamKeyRange": utilities.unary_stream_service_description(
          servicer.StreamKeyRange,
          binlogdata_pb2.StreamKeyRangeRequest.FromString,
          binlogdata_pb2.StreamKeyRangeResponse.SerializeToString,
      ),
      "StreamTables": utilities.unary_stream_service_description(
          servicer.StreamTables,
          binlogdata_pb2.StreamTablesRequest.FromString,
          binlogdata_pb2.StreamTablesResponse.SerializeToString,
      ),
      "StreamUpdate": utilities.unary_stream_service_description(
          servicer.StreamUpdate,
          binlogdata_pb2.StreamUpdateRequest.FromString,
          binlogdata_pb2.StreamUpdateResponse.SerializeToString,
      ),
  }
  return implementations.server("binlogservice.UpdateStream", method_service_descriptions, port, private_key=private_key, certificate_chain=certificate_chain)
def early_adopter_create_UpdateStream_stub(host, port, metadata_transformer=None, secure=False, root_certificates=None, private_key=None, certificate_chain=None, server_host_override=None):
  """Create a client stub for the binlogservice.UpdateStream service.

  Args:
    host: server host to connect to.
    port: server port to connect to.
    metadata_transformer: optional callable applied to outgoing metadata.
    secure: whether to use TLS for the channel.
    root_certificates: PEM root certificates for TLS, or None.
    private_key: PEM client private key for TLS, or None.
    certificate_chain: PEM client certificate chain for TLS, or None.
    server_host_override: hostname override for TLS server validation.

  Returns:
    An early-adopter stub exposing the three response-streaming methods,
    created via implementations.stub().
  """
  # The code generator emitted one duplicate `import binlogdata_pb2` per
  # message reference; a single import is sufficient.
  import binlogdata_pb2
  method_invocation_descriptions = {
      "StreamKeyRange": utilities.unary_stream_invocation_description(
          binlogdata_pb2.StreamKeyRangeRequest.SerializeToString,
          binlogdata_pb2.StreamKeyRangeResponse.FromString,
      ),
      "StreamTables": utilities.unary_stream_invocation_description(
          binlogdata_pb2.StreamTablesRequest.SerializeToString,
          binlogdata_pb2.StreamTablesResponse.FromString,
      ),
      "StreamUpdate": utilities.unary_stream_invocation_description(
          binlogdata_pb2.StreamUpdateRequest.SerializeToString,
          binlogdata_pb2.StreamUpdateResponse.FromString,
      ),
  }
  return implementations.stub("binlogservice.UpdateStream", method_invocation_descriptions, host, port, metadata_transformer=metadata_transformer, secure=secure, root_certificates=root_certificates, private_key=private_key, certificate_chain=certificate_chain, server_host_override=server_host_override)
# @@protoc_insertion_point(module_scope)

Просмотреть файл

315
py/vtproto/mysqlctl_pb2.py Normal file
Просмотреть файл

@ -0,0 +1,315 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: mysqlctl.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
# File descriptor for mysqlctl.proto: the MysqlCtl service plus its
# request/response messages, all defined in this one file (no dependencies).
DESCRIPTOR = _descriptor.FileDescriptor(
  name='mysqlctl.proto',
  package='mysqlctl',
  syntax='proto3',
  serialized_pb=_b('\n\x0emysqlctl.proto\x12\x08mysqlctl\"\x0e\n\x0cStartRequest\"\x0f\n\rStartResponse\"*\n\x0fShutdownRequest\x12\x17\n\x0fwait_for_mysqld\x18\x01 \x01(\x08\"\x12\n\x10ShutdownResponse\"\x18\n\x16RunMysqlUpgradeRequest\"\x19\n\x17RunMysqlUpgradeResponse2\xe5\x01\n\x08MysqlCtl\x12:\n\x05Start\x12\x16.mysqlctl.StartRequest\x1a\x17.mysqlctl.StartResponse\"\x00\x12\x43\n\x08Shutdown\x12\x19.mysqlctl.ShutdownRequest\x1a\x1a.mysqlctl.ShutdownResponse\"\x00\x12X\n\x0fRunMysqlUpgrade\x12 .mysqlctl.RunMysqlUpgradeRequest\x1a!.mysqlctl.RunMysqlUpgradeResponse\"\x00\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# ---------------------------------------------------------------------------
# Message descriptors generated by protoc from mysqlctl.proto.  All messages
# are empty except ShutdownRequest (single `wait_for_mysqld` field).
# serialized_start/serialized_end index into DESCRIPTOR.serialized_pb;
# never edit these values by hand -- regenerate instead.
# ---------------------------------------------------------------------------
_STARTREQUEST = _descriptor.Descriptor(
  name='StartRequest',
  full_name='mysqlctl.StartRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=28,
  serialized_end=42,
)
_STARTRESPONSE = _descriptor.Descriptor(
  name='StartResponse',
  full_name='mysqlctl.StartResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=44,
  serialized_end=59,
)
# ShutdownRequest: single `wait_for_mysqld` field (default_value=False).
_SHUTDOWNREQUEST = _descriptor.Descriptor(
  name='ShutdownRequest',
  full_name='mysqlctl.ShutdownRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='wait_for_mysqld', full_name='mysqlctl.ShutdownRequest.wait_for_mysqld', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=61,
  serialized_end=103,
)
_SHUTDOWNRESPONSE = _descriptor.Descriptor(
  name='ShutdownResponse',
  full_name='mysqlctl.ShutdownResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=105,
  serialized_end=123,
)
_RUNMYSQLUPGRADEREQUEST = _descriptor.Descriptor(
  name='RunMysqlUpgradeRequest',
  full_name='mysqlctl.RunMysqlUpgradeRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=125,
  serialized_end=149,
)
_RUNMYSQLUPGRADERESPONSE = _descriptor.Descriptor(
  name='RunMysqlUpgradeResponse',
  full_name='mysqlctl.RunMysqlUpgradeResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=151,
  serialized_end=176,
)
# Register the top-level message types on the file descriptor, then build the
# concrete message classes via protobuf reflection and register them with the
# default symbol database.
DESCRIPTOR.message_types_by_name['StartRequest'] = _STARTREQUEST
DESCRIPTOR.message_types_by_name['StartResponse'] = _STARTRESPONSE
DESCRIPTOR.message_types_by_name['ShutdownRequest'] = _SHUTDOWNREQUEST
DESCRIPTOR.message_types_by_name['ShutdownResponse'] = _SHUTDOWNRESPONSE
DESCRIPTOR.message_types_by_name['RunMysqlUpgradeRequest'] = _RUNMYSQLUPGRADEREQUEST
DESCRIPTOR.message_types_by_name['RunMysqlUpgradeResponse'] = _RUNMYSQLUPGRADERESPONSE
StartRequest = _reflection.GeneratedProtocolMessageType('StartRequest', (_message.Message,), dict(
  DESCRIPTOR = _STARTREQUEST,
  __module__ = 'mysqlctl_pb2'
  # @@protoc_insertion_point(class_scope:mysqlctl.StartRequest)
  ))
_sym_db.RegisterMessage(StartRequest)
StartResponse = _reflection.GeneratedProtocolMessageType('StartResponse', (_message.Message,), dict(
  DESCRIPTOR = _STARTRESPONSE,
  __module__ = 'mysqlctl_pb2'
  # @@protoc_insertion_point(class_scope:mysqlctl.StartResponse)
  ))
_sym_db.RegisterMessage(StartResponse)
ShutdownRequest = _reflection.GeneratedProtocolMessageType('ShutdownRequest', (_message.Message,), dict(
  DESCRIPTOR = _SHUTDOWNREQUEST,
  __module__ = 'mysqlctl_pb2'
  # @@protoc_insertion_point(class_scope:mysqlctl.ShutdownRequest)
  ))
_sym_db.RegisterMessage(ShutdownRequest)
ShutdownResponse = _reflection.GeneratedProtocolMessageType('ShutdownResponse', (_message.Message,), dict(
  DESCRIPTOR = _SHUTDOWNRESPONSE,
  __module__ = 'mysqlctl_pb2'
  # @@protoc_insertion_point(class_scope:mysqlctl.ShutdownResponse)
  ))
_sym_db.RegisterMessage(ShutdownResponse)
RunMysqlUpgradeRequest = _reflection.GeneratedProtocolMessageType('RunMysqlUpgradeRequest', (_message.Message,), dict(
  DESCRIPTOR = _RUNMYSQLUPGRADEREQUEST,
  __module__ = 'mysqlctl_pb2'
  # @@protoc_insertion_point(class_scope:mysqlctl.RunMysqlUpgradeRequest)
  ))
_sym_db.RegisterMessage(RunMysqlUpgradeRequest)
RunMysqlUpgradeResponse = _reflection.GeneratedProtocolMessageType('RunMysqlUpgradeResponse', (_message.Message,), dict(
  DESCRIPTOR = _RUNMYSQLUPGRADERESPONSE,
  __module__ = 'mysqlctl_pb2'
  # @@protoc_insertion_point(class_scope:mysqlctl.RunMysqlUpgradeResponse)
  ))
_sym_db.RegisterMessage(RunMysqlUpgradeResponse)
import abc
from grpc.early_adopter import implementations
from grpc.framework.alpha import utilities
class EarlyAdopterMysqlCtlServicer(object):
  """Abstract service-side interface for mysqlctl.MysqlCtl.

  Implementations must override the three RPC methods below; each receives
  the deserialized request message and an RPC context.
  """
  __metaclass__ = abc.ABCMeta  # Python 2 metaclass spelling; ignored by Python 3.
  @abc.abstractmethod
  def Start(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def Shutdown(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def RunMysqlUpgrade(self, request, context):
    raise NotImplementedError()
class EarlyAdopterMysqlCtlServer(object):
  """Abstract lifecycle handle for a running MysqlCtl gRPC server."""
  __metaclass__ = abc.ABCMeta  # Python 2 metaclass spelling; ignored by Python 3.
  @abc.abstractmethod
  def start(self):
    raise NotImplementedError()
  @abc.abstractmethod
  def stop(self):
    raise NotImplementedError()
class EarlyAdopterMysqlCtlStub(object):
  """Abstract client-side stub interface for mysqlctl.MysqlCtl."""
  __metaclass__ = abc.ABCMeta  # Python 2 metaclass spelling; ignored by Python 3.
  # NOTE(review): `<method>.async = None` is Python-2-only -- `async` became a
  # reserved keyword in Python 3.7.  Fix belongs in the generator.
  @abc.abstractmethod
  def Start(self, request):
    raise NotImplementedError()
  Start.async = None
  @abc.abstractmethod
  def Shutdown(self, request):
    raise NotImplementedError()
  Shutdown.async = None
  @abc.abstractmethod
  def RunMysqlUpgrade(self, request):
    raise NotImplementedError()
  RunMysqlUpgrade.async = None
def early_adopter_create_MysqlCtl_server(servicer, port, private_key=None, certificate_chain=None):
  """Create a gRPC server for the mysqlctl.MysqlCtl service.

  Args:
    servicer: object implementing the EarlyAdopterMysqlCtlServicer methods.
    port: port for the server to listen on.
    private_key: PEM private key to serve TLS, or None for an insecure server.
    certificate_chain: PEM certificate chain to serve TLS, or None.

  Returns:
    An early-adopter server object wired to the three unary methods,
    created via implementations.server().
  """
  # The code generator emitted one duplicate `import mysqlctl_pb2` per
  # message reference; a single import is sufficient.
  import mysqlctl_pb2
  method_service_descriptions = {
      "RunMysqlUpgrade": utilities.unary_unary_service_description(
          servicer.RunMysqlUpgrade,
          mysqlctl_pb2.RunMysqlUpgradeRequest.FromString,
          mysqlctl_pb2.RunMysqlUpgradeResponse.SerializeToString,
      ),
      "Shutdown": utilities.unary_unary_service_description(
          servicer.Shutdown,
          mysqlctl_pb2.ShutdownRequest.FromString,
          mysqlctl_pb2.ShutdownResponse.SerializeToString,
      ),
      "Start": utilities.unary_unary_service_description(
          servicer.Start,
          mysqlctl_pb2.StartRequest.FromString,
          mysqlctl_pb2.StartResponse.SerializeToString,
      ),
  }
  return implementations.server("mysqlctl.MysqlCtl", method_service_descriptions, port, private_key=private_key, certificate_chain=certificate_chain)
def early_adopter_create_MysqlCtl_stub(host, port, metadata_transformer=None, secure=False, root_certificates=None, private_key=None, certificate_chain=None, server_host_override=None):
  """Create a client stub for the mysqlctl.MysqlCtl service.

  Args:
    host: server host to connect to.
    port: server port to connect to.
    metadata_transformer: optional callable applied to outgoing metadata.
    secure: whether to use TLS for the channel.
    root_certificates: PEM root certificates for TLS, or None.
    private_key: PEM client private key for TLS, or None.
    certificate_chain: PEM client certificate chain for TLS, or None.
    server_host_override: hostname override for TLS server validation.

  Returns:
    An early-adopter stub exposing the three unary methods, created via
    implementations.stub().
  """
  # The code generator emitted one duplicate `import mysqlctl_pb2` per
  # message reference; a single import is sufficient.
  import mysqlctl_pb2
  method_invocation_descriptions = {
      "RunMysqlUpgrade": utilities.unary_unary_invocation_description(
          mysqlctl_pb2.RunMysqlUpgradeRequest.SerializeToString,
          mysqlctl_pb2.RunMysqlUpgradeResponse.FromString,
      ),
      "Shutdown": utilities.unary_unary_invocation_description(
          mysqlctl_pb2.ShutdownRequest.SerializeToString,
          mysqlctl_pb2.ShutdownResponse.FromString,
      ),
      "Start": utilities.unary_unary_invocation_description(
          mysqlctl_pb2.StartRequest.SerializeToString,
          mysqlctl_pb2.StartResponse.FromString,
      ),
  }
  return implementations.stub("mysqlctl.MysqlCtl", method_invocation_descriptions, host, port, metadata_transformer=metadata_transformer, secure=secure, root_certificates=root_certificates, private_key=private_key, certificate_chain=certificate_chain, server_host_override=server_host_override)
# @@protoc_insertion_point(module_scope)

1862
py/vtproto/query_pb2.py Normal file

Различия файлов скрыты, потому что одна или несколько строк слишком длинны

Просмотреть файл

@ -0,0 +1,238 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: queryservice.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import query_pb2 as query__pb2
# File descriptor for queryservice.proto.  The Query service references only
# messages defined in query.proto, hence the single dependency below.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='queryservice.proto',
  package='queryservice',
  syntax='proto3',
  serialized_pb=_b('\n\x12queryservice.proto\x12\x0cqueryservice\x1a\x0bquery.proto2\xe9\x04\n\x05Query\x12I\n\x0cGetSessionId\x12\x1a.query.GetSessionIdRequest\x1a\x1b.query.GetSessionIdResponse\"\x00\x12:\n\x07\x45xecute\x12\x15.query.ExecuteRequest\x1a\x16.query.ExecuteResponse\"\x00\x12I\n\x0c\x45xecuteBatch\x12\x1a.query.ExecuteBatchRequest\x1a\x1b.query.ExecuteBatchResponse\"\x00\x12N\n\rStreamExecute\x12\x1b.query.StreamExecuteRequest\x1a\x1c.query.StreamExecuteResponse\"\x00\x30\x01\x12\x34\n\x05\x42\x65gin\x12\x13.query.BeginRequest\x1a\x14.query.BeginResponse\"\x00\x12\x37\n\x06\x43ommit\x12\x14.query.CommitRequest\x1a\x15.query.CommitResponse\"\x00\x12=\n\x08Rollback\x12\x16.query.RollbackRequest\x1a\x17.query.RollbackResponse\"\x00\x12\x43\n\nSplitQuery\x12\x18.query.SplitQueryRequest\x1a\x19.query.SplitQueryResponse\"\x00\x12K\n\x0cStreamHealth\x12\x1a.query.StreamHealthRequest\x1a\x1b.query.StreamHealthResponse\"\x00\x30\x01\x62\x06proto3')
  ,
  dependencies=[query__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
import abc
from grpc.early_adopter import implementations
from grpc.framework.alpha import utilities
class EarlyAdopterQueryServicer(object):
  """Abstract service-side interface for queryservice.Query.

  Implementations must override the nine RPC methods below; each receives
  the deserialized request message and an RPC context.
  """
  __metaclass__ = abc.ABCMeta  # Python 2 metaclass spelling; ignored by Python 3.
  @abc.abstractmethod
  def GetSessionId(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def Execute(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def ExecuteBatch(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def StreamExecute(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def Begin(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def Commit(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def Rollback(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def SplitQuery(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def StreamHealth(self, request, context):
    raise NotImplementedError()
class EarlyAdopterQueryServer(object):
  """Abstract lifecycle handle for a running Query gRPC server."""
  __metaclass__ = abc.ABCMeta  # Python 2 metaclass spelling; ignored by Python 3.
  @abc.abstractmethod
  def start(self):
    raise NotImplementedError()
  @abc.abstractmethod
  def stop(self):
    raise NotImplementedError()
class EarlyAdopterQueryStub(object):
  """Abstract client-side stub interface for queryservice.Query."""
  __metaclass__ = abc.ABCMeta  # Python 2 metaclass spelling; ignored by Python 3.
  # NOTE(review): `<method>.async = None` is Python-2-only -- `async` became a
  # reserved keyword in Python 3.7.  Fix belongs in the generator.
  @abc.abstractmethod
  def GetSessionId(self, request):
    raise NotImplementedError()
  GetSessionId.async = None
  @abc.abstractmethod
  def Execute(self, request):
    raise NotImplementedError()
  Execute.async = None
  @abc.abstractmethod
  def ExecuteBatch(self, request):
    raise NotImplementedError()
  ExecuteBatch.async = None
  @abc.abstractmethod
  def StreamExecute(self, request):
    raise NotImplementedError()
  StreamExecute.async = None
  @abc.abstractmethod
  def Begin(self, request):
    raise NotImplementedError()
  Begin.async = None
  @abc.abstractmethod
  def Commit(self, request):
    raise NotImplementedError()
  Commit.async = None
  @abc.abstractmethod
  def Rollback(self, request):
    raise NotImplementedError()
  Rollback.async = None
  @abc.abstractmethod
  def SplitQuery(self, request):
    raise NotImplementedError()
  SplitQuery.async = None
  @abc.abstractmethod
  def StreamHealth(self, request):
    raise NotImplementedError()
  StreamHealth.async = None
def early_adopter_create_Query_server(servicer, port, private_key=None, certificate_chain=None):
  """Create a gRPC server for the queryservice.Query service.

  Args:
    servicer: object implementing the EarlyAdopterQueryServicer methods.
    port: port for the server to listen on.
    private_key: PEM private key to serve TLS, or None for an insecure server.
    certificate_chain: PEM certificate chain to serve TLS, or None.

  Returns:
    An early-adopter server object wired to the seven unary and two
    response-streaming methods, created via implementations.server().
  """
  # The code generator emitted one duplicate `import query_pb2` per message
  # reference (18 copies); a single import is sufficient.
  import query_pb2
  method_service_descriptions = {
      "Begin": utilities.unary_unary_service_description(
          servicer.Begin,
          query_pb2.BeginRequest.FromString,
          query_pb2.BeginResponse.SerializeToString,
      ),
      "Commit": utilities.unary_unary_service_description(
          servicer.Commit,
          query_pb2.CommitRequest.FromString,
          query_pb2.CommitResponse.SerializeToString,
      ),
      "Execute": utilities.unary_unary_service_description(
          servicer.Execute,
          query_pb2.ExecuteRequest.FromString,
          query_pb2.ExecuteResponse.SerializeToString,
      ),
      "ExecuteBatch": utilities.unary_unary_service_description(
          servicer.ExecuteBatch,
          query_pb2.ExecuteBatchRequest.FromString,
          query_pb2.ExecuteBatchResponse.SerializeToString,
      ),
      "GetSessionId": utilities.unary_unary_service_description(
          servicer.GetSessionId,
          query_pb2.GetSessionIdRequest.FromString,
          query_pb2.GetSessionIdResponse.SerializeToString,
      ),
      "Rollback": utilities.unary_unary_service_description(
          servicer.Rollback,
          query_pb2.RollbackRequest.FromString,
          query_pb2.RollbackResponse.SerializeToString,
      ),
      "SplitQuery": utilities.unary_unary_service_description(
          servicer.SplitQuery,
          query_pb2.SplitQueryRequest.FromString,
          query_pb2.SplitQueryResponse.SerializeToString,
      ),
      "StreamExecute": utilities.unary_stream_service_description(
          servicer.StreamExecute,
          query_pb2.StreamExecuteRequest.FromString,
          query_pb2.StreamExecuteResponse.SerializeToString,
      ),
      "StreamHealth": utilities.unary_stream_service_description(
          servicer.StreamHealth,
          query_pb2.StreamHealthRequest.FromString,
          query_pb2.StreamHealthResponse.SerializeToString,
      ),
  }
  return implementations.server("queryservice.Query", method_service_descriptions, port, private_key=private_key, certificate_chain=certificate_chain)
def early_adopter_create_Query_stub(host, port, metadata_transformer=None, secure=False, root_certificates=None, private_key=None, certificate_chain=None, server_host_override=None):
  """Create a client stub for the queryservice.Query service.

  Args:
    host: server host to connect to.
    port: server port to connect to.
    metadata_transformer: optional callable applied to outgoing metadata.
    secure: whether to use TLS for the channel.
    root_certificates: PEM root certificates for TLS, or None.
    private_key: PEM client private key for TLS, or None.
    certificate_chain: PEM client certificate chain for TLS, or None.
    server_host_override: hostname override for TLS server validation.

  Returns:
    An early-adopter stub exposing the seven unary and two
    response-streaming methods, created via implementations.stub().
  """
  # The code generator emitted one duplicate `import query_pb2` per message
  # reference (18 copies); a single import is sufficient.
  import query_pb2
  method_invocation_descriptions = {
      "Begin": utilities.unary_unary_invocation_description(
          query_pb2.BeginRequest.SerializeToString,
          query_pb2.BeginResponse.FromString,
      ),
      "Commit": utilities.unary_unary_invocation_description(
          query_pb2.CommitRequest.SerializeToString,
          query_pb2.CommitResponse.FromString,
      ),
      "Execute": utilities.unary_unary_invocation_description(
          query_pb2.ExecuteRequest.SerializeToString,
          query_pb2.ExecuteResponse.FromString,
      ),
      "ExecuteBatch": utilities.unary_unary_invocation_description(
          query_pb2.ExecuteBatchRequest.SerializeToString,
          query_pb2.ExecuteBatchResponse.FromString,
      ),
      "GetSessionId": utilities.unary_unary_invocation_description(
          query_pb2.GetSessionIdRequest.SerializeToString,
          query_pb2.GetSessionIdResponse.FromString,
      ),
      "Rollback": utilities.unary_unary_invocation_description(
          query_pb2.RollbackRequest.SerializeToString,
          query_pb2.RollbackResponse.FromString,
      ),
      "SplitQuery": utilities.unary_unary_invocation_description(
          query_pb2.SplitQueryRequest.SerializeToString,
          query_pb2.SplitQueryResponse.FromString,
      ),
      "StreamExecute": utilities.unary_stream_invocation_description(
          query_pb2.StreamExecuteRequest.SerializeToString,
          query_pb2.StreamExecuteResponse.FromString,
      ),
      "StreamHealth": utilities.unary_stream_invocation_description(
          query_pb2.StreamHealthRequest.SerializeToString,
          query_pb2.StreamHealthResponse.FromString,
      ),
  }
  return implementations.stub("queryservice.Query", method_invocation_descriptions, host, port, metadata_transformer=metadata_transformer, secure=secure, root_certificates=root_certificates, private_key=private_key, certificate_chain=certificate_chain, server_host_override=server_host_override)
# @@protoc_insertion_point(module_scope)

Просмотреть файл

@ -0,0 +1,349 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: replicationdata.proto
#
# NOTE(review): this is protoc output; regenerate via the Makefile 'proto'
# target rather than editing by hand.
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


# File descriptor for replicationdata.proto; serialized_pb is the wire-format
# FileDescriptorProto emitted by protoc.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='replicationdata.proto',
  package='replicationdata',
  syntax='proto3',
  serialized_pb=_b('\n\x15replicationdata.proto\x12\x0freplicationdata\"?\n\x0bMariadbGtid\x12\x0e\n\x06\x64omain\x18\x01 \x01(\r\x12\x0e\n\x06server\x18\x02 \x01(\r\x12\x10\n\x08sequence\x18\x03 \x01(\x04\"\xd7\x01\n\x0cMysqlGtidSet\x12<\n\x08uuid_set\x18\x01 \x03(\x0b\x32*.replicationdata.MysqlGtidSet.MysqlUuidSet\x1a,\n\rMysqlInterval\x12\r\n\x05\x66irst\x18\x01 \x01(\x04\x12\x0c\n\x04last\x18\x02 \x01(\x04\x1a[\n\x0cMysqlUuidSet\x12\x0c\n\x04uuid\x18\x01 \x01(\x0c\x12=\n\x08interval\x18\x02 \x03(\x0b\x32+.replicationdata.MysqlGtidSet.MysqlInterval\"u\n\x08Position\x12\x32\n\x0cmariadb_gtid\x18\x01 \x01(\x0b\x32\x1c.replicationdata.MariadbGtid\x12\x35\n\x0emysql_gtid_set\x18\x02 \x01(\x0b\x32\x1d.replicationdata.MysqlGtidSet\"\xd1\x01\n\x06Status\x12+\n\x08position\x18\x01 \x01(\x0b\x32\x19.replicationdata.Position\x12\x18\n\x10slave_io_running\x18\x02 \x01(\x08\x12\x19\n\x11slave_sql_running\x18\x03 \x01(\x08\x12\x1d\n\x15seconds_behind_master\x18\x04 \x01(\r\x12\x13\n\x0bmaster_host\x18\x05 \x01(\t\x12\x13\n\x0bmaster_port\x18\x06 \x01(\x05\x12\x1c\n\x14master_connect_retry\x18\x07 \x01(\x05\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)


# Descriptor for message replicationdata.MariadbGtid
# (fields: domain, server, sequence).
_MARIADBGTID = _descriptor.Descriptor(
  name='MariadbGtid',
  full_name='replicationdata.MariadbGtid',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='domain', full_name='replicationdata.MariadbGtid.domain', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='server', full_name='replicationdata.MariadbGtid.server', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='sequence', full_name='replicationdata.MariadbGtid.sequence', index=2,
      number=3, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=42,
  serialized_end=105,
)


# Descriptor for nested message replicationdata.MysqlGtidSet.MysqlInterval
# (fields: first, last).
_MYSQLGTIDSET_MYSQLINTERVAL = _descriptor.Descriptor(
  name='MysqlInterval',
  full_name='replicationdata.MysqlGtidSet.MysqlInterval',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='first', full_name='replicationdata.MysqlGtidSet.MysqlInterval.first', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='last', full_name='replicationdata.MysqlGtidSet.MysqlInterval.last', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=186,
  serialized_end=230,
)

# Descriptor for nested message replicationdata.MysqlGtidSet.MysqlUuidSet
# (fields: uuid, repeated interval).
_MYSQLGTIDSET_MYSQLUUIDSET = _descriptor.Descriptor(
  name='MysqlUuidSet',
  full_name='replicationdata.MysqlGtidSet.MysqlUuidSet',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='uuid', full_name='replicationdata.MysqlGtidSet.MysqlUuidSet.uuid', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='interval', full_name='replicationdata.MysqlGtidSet.MysqlUuidSet.interval', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=232,
  serialized_end=323,
)

# Descriptor for message replicationdata.MysqlGtidSet
# (field: repeated uuid_set; nests MysqlInterval and MysqlUuidSet).
_MYSQLGTIDSET = _descriptor.Descriptor(
  name='MysqlGtidSet',
  full_name='replicationdata.MysqlGtidSet',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='uuid_set', full_name='replicationdata.MysqlGtidSet.uuid_set', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_MYSQLGTIDSET_MYSQLINTERVAL, _MYSQLGTIDSET_MYSQLUUIDSET, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=108,
  serialized_end=323,
)


# Descriptor for message replicationdata.Position
# (fields: mariadb_gtid, mysql_gtid_set).
_POSITION = _descriptor.Descriptor(
  name='Position',
  full_name='replicationdata.Position',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='mariadb_gtid', full_name='replicationdata.Position.mariadb_gtid', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='mysql_gtid_set', full_name='replicationdata.Position.mysql_gtid_set', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=325,
  serialized_end=442,
)


# Descriptor for message replicationdata.Status (replication status fields).
_STATUS = _descriptor.Descriptor(
  name='Status',
  full_name='replicationdata.Status',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='position', full_name='replicationdata.Status.position', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='slave_io_running', full_name='replicationdata.Status.slave_io_running', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='slave_sql_running', full_name='replicationdata.Status.slave_sql_running', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='seconds_behind_master', full_name='replicationdata.Status.seconds_behind_master', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='master_host', full_name='replicationdata.Status.master_host', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='master_port', full_name='replicationdata.Status.master_port', index=5,
      number=6, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='master_connect_retry', full_name='replicationdata.Status.master_connect_retry', index=6,
      number=7, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=445,
  serialized_end=654,
)

# Cross-link nested/containing types and message-typed fields, then register
# all top-level message types on the file descriptor.
_MYSQLGTIDSET_MYSQLINTERVAL.containing_type = _MYSQLGTIDSET
_MYSQLGTIDSET_MYSQLUUIDSET.fields_by_name['interval'].message_type = _MYSQLGTIDSET_MYSQLINTERVAL
_MYSQLGTIDSET_MYSQLUUIDSET.containing_type = _MYSQLGTIDSET
_MYSQLGTIDSET.fields_by_name['uuid_set'].message_type = _MYSQLGTIDSET_MYSQLUUIDSET
_POSITION.fields_by_name['mariadb_gtid'].message_type = _MARIADBGTID
_POSITION.fields_by_name['mysql_gtid_set'].message_type = _MYSQLGTIDSET
_STATUS.fields_by_name['position'].message_type = _POSITION
DESCRIPTOR.message_types_by_name['MariadbGtid'] = _MARIADBGTID
DESCRIPTOR.message_types_by_name['MysqlGtidSet'] = _MYSQLGTIDSET
DESCRIPTOR.message_types_by_name['Position'] = _POSITION
DESCRIPTOR.message_types_by_name['Status'] = _STATUS

# Concrete message classes, built via the reflection metaclass from the
# descriptors above and registered with the default symbol database.
MariadbGtid = _reflection.GeneratedProtocolMessageType('MariadbGtid', (_message.Message,), dict(
  DESCRIPTOR = _MARIADBGTID,
  __module__ = 'replicationdata_pb2'
  # @@protoc_insertion_point(class_scope:replicationdata.MariadbGtid)
  ))
_sym_db.RegisterMessage(MariadbGtid)

MysqlGtidSet = _reflection.GeneratedProtocolMessageType('MysqlGtidSet', (_message.Message,), dict(

  MysqlInterval = _reflection.GeneratedProtocolMessageType('MysqlInterval', (_message.Message,), dict(
    DESCRIPTOR = _MYSQLGTIDSET_MYSQLINTERVAL,
    __module__ = 'replicationdata_pb2'
    # @@protoc_insertion_point(class_scope:replicationdata.MysqlGtidSet.MysqlInterval)
    ))
  ,

  MysqlUuidSet = _reflection.GeneratedProtocolMessageType('MysqlUuidSet', (_message.Message,), dict(
    DESCRIPTOR = _MYSQLGTIDSET_MYSQLUUIDSET,
    __module__ = 'replicationdata_pb2'
    # @@protoc_insertion_point(class_scope:replicationdata.MysqlGtidSet.MysqlUuidSet)
    ))
  ,
  DESCRIPTOR = _MYSQLGTIDSET,
  __module__ = 'replicationdata_pb2'
  # @@protoc_insertion_point(class_scope:replicationdata.MysqlGtidSet)
  ))
_sym_db.RegisterMessage(MysqlGtidSet)
_sym_db.RegisterMessage(MysqlGtidSet.MysqlInterval)
_sym_db.RegisterMessage(MysqlGtidSet.MysqlUuidSet)

Position = _reflection.GeneratedProtocolMessageType('Position', (_message.Message,), dict(
  DESCRIPTOR = _POSITION,
  __module__ = 'replicationdata_pb2'
  # @@protoc_insertion_point(class_scope:replicationdata.Position)
  ))
_sym_db.RegisterMessage(Position)

Status = _reflection.GeneratedProtocolMessageType('Status', (_message.Message,), dict(
  DESCRIPTOR = _STATUS,
  __module__ = 'replicationdata_pb2'
  # @@protoc_insertion_point(class_scope:replicationdata.Status)
  ))
_sym_db.RegisterMessage(Status)

# NOTE(review): the gRPC plugin emits these imports unconditionally; they
# appear unused in this module (replicationdata.proto declares no services).
import abc
from grpc.early_adopter import implementations
from grpc.framework.alpha import utilities
# @@protoc_insertion_point(module_scope)

Различия файлов скрыты, потому что одна или несколько строк слишком длинны

Различия файлов скрыты, потому что одна или несколько строк слишком длинны

1414
py/vtproto/topodata_pb2.py Normal file

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

Просмотреть файл

2271
py/vtproto/vtgate_pb2.py Normal file

Различия файлов скрыты, потому что одна или несколько строк слишком длинны

Просмотреть файл

@ -0,0 +1,358 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: vtgateservice.proto
#
# NOTE(review): protoc + grpc_python_plugin output; regenerate via the
# Makefile 'proto' target rather than editing by hand.
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


import vtgate_pb2 as vtgate__pb2

# File descriptor for vtgateservice.proto; depends on vtgate.proto for all
# request/response message types of the Vitess service.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='vtgateservice.proto',
  package='vtgateservice',
  syntax='proto3',
  serialized_pb=_b('\n\x13vtgateservice.proto\x12\rvtgateservice\x1a\x0cvtgate.proto2\x85\n\n\x06Vitess\x12<\n\x07\x45xecute\x12\x16.vtgate.ExecuteRequest\x1a\x17.vtgate.ExecuteResponse\"\x00\x12N\n\rExecuteShards\x12\x1c.vtgate.ExecuteShardsRequest\x1a\x1d.vtgate.ExecuteShardsResponse\"\x00\x12]\n\x12\x45xecuteKeyspaceIds\x12!.vtgate.ExecuteKeyspaceIdsRequest\x1a\".vtgate.ExecuteKeyspaceIdsResponse\"\x00\x12W\n\x10\x45xecuteKeyRanges\x12\x1f.vtgate.ExecuteKeyRangesRequest\x1a .vtgate.ExecuteKeyRangesResponse\"\x00\x12W\n\x10\x45xecuteEntityIds\x12\x1f.vtgate.ExecuteEntityIdsRequest\x1a .vtgate.ExecuteEntityIdsResponse\"\x00\x12]\n\x12\x45xecuteBatchShards\x12!.vtgate.ExecuteBatchShardsRequest\x1a\".vtgate.ExecuteBatchShardsResponse\"\x00\x12l\n\x17\x45xecuteBatchKeyspaceIds\x12&.vtgate.ExecuteBatchKeyspaceIdsRequest\x1a\'.vtgate.ExecuteBatchKeyspaceIdsResponse\"\x00\x12P\n\rStreamExecute\x12\x1c.vtgate.StreamExecuteRequest\x1a\x1d.vtgate.StreamExecuteResponse\"\x00\x30\x01\x12\x62\n\x13StreamExecuteShards\x12\".vtgate.StreamExecuteShardsRequest\x1a#.vtgate.StreamExecuteShardsResponse\"\x00\x30\x01\x12q\n\x18StreamExecuteKeyspaceIds\x12\'.vtgate.StreamExecuteKeyspaceIdsRequest\x1a(.vtgate.StreamExecuteKeyspaceIdsResponse\"\x00\x30\x01\x12k\n\x16StreamExecuteKeyRanges\x12%.vtgate.StreamExecuteKeyRangesRequest\x1a&.vtgate.StreamExecuteKeyRangesResponse\"\x00\x30\x01\x12\x36\n\x05\x42\x65gin\x12\x14.vtgate.BeginRequest\x1a\x15.vtgate.BeginResponse\"\x00\x12\x39\n\x06\x43ommit\x12\x15.vtgate.CommitRequest\x1a\x16.vtgate.CommitResponse\"\x00\x12?\n\x08Rollback\x12\x17.vtgate.RollbackRequest\x1a\x18.vtgate.RollbackResponse\"\x00\x12\x45\n\nSplitQuery\x12\x19.vtgate.SplitQueryRequest\x1a\x1a.vtgate.SplitQueryResponse\"\x00\x62\x06proto3')
  ,
  dependencies=[vtgate__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

import abc
from grpc.early_adopter import implementations
from grpc.framework.alpha import utilities
class EarlyAdopterVitessServicer(object):
  """Abstract server-side interface for the vtgateservice.Vitess service.

  Generated by the gRPC early-adopter plugin (the '<fill me in later!>'
  placeholder docstring is what this plugin version emitted).  Implementations
  override every method; each receives the request message and an RPC context.
  """
  # Python-2-style ABC declaration (generated code predates Py3 metaclass syntax).
  __metaclass__ = abc.ABCMeta
  @abc.abstractmethod
  def Execute(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def ExecuteShards(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def ExecuteKeyspaceIds(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def ExecuteKeyRanges(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def ExecuteEntityIds(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def ExecuteBatchShards(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def ExecuteBatchKeyspaceIds(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def StreamExecute(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def StreamExecuteShards(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def StreamExecuteKeyspaceIds(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def StreamExecuteKeyRanges(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def Begin(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def Commit(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def Rollback(self, request, context):
    raise NotImplementedError()
  @abc.abstractmethod
  def SplitQuery(self, request, context):
    raise NotImplementedError()
class EarlyAdopterVitessServer(object):
  """Abstract handle for a running Vitess gRPC server (start/stop lifecycle).

  Generated by the gRPC early-adopter plugin; the original placeholder
  docstring was '<fill me in later!>'.
  """
  # Python-2-style ABC declaration (generated code predates Py3 metaclass syntax).
  __metaclass__ = abc.ABCMeta
  @abc.abstractmethod
  def start(self):
    raise NotImplementedError()
  @abc.abstractmethod
  def stop(self):
    raise NotImplementedError()
class EarlyAdopterVitessStub(object):
  """Abstract client-side stub interface for the vtgateservice.Vitess service.

  Generated by the gRPC early-adopter plugin; the original placeholder
  docstring was '<fill me in later!>'.  Each method carries an 'async'
  attribute set by the early-adopter framework.
  """
  # Python-2-style ABC declaration (generated code predates Py3 metaclass syntax).
  __metaclass__ = abc.ABCMeta
  @abc.abstractmethod
  def Execute(self, request):
    raise NotImplementedError()
  # NOTE(review): 'async' became a reserved keyword in Python 3.7, so these
  # 'X.async = None' assignments are a SyntaxError under modern Python.
  # This generated code targets the Python 2 gRPC early-adopter API; fixing
  # it means regenerating with a newer grpcio-tools, not hand-editing.
  Execute.async = None
  @abc.abstractmethod
  def ExecuteShards(self, request):
    raise NotImplementedError()
  ExecuteShards.async = None
  @abc.abstractmethod
  def ExecuteKeyspaceIds(self, request):
    raise NotImplementedError()
  ExecuteKeyspaceIds.async = None
  @abc.abstractmethod
  def ExecuteKeyRanges(self, request):
    raise NotImplementedError()
  ExecuteKeyRanges.async = None
  @abc.abstractmethod
  def ExecuteEntityIds(self, request):
    raise NotImplementedError()
  ExecuteEntityIds.async = None
  @abc.abstractmethod
  def ExecuteBatchShards(self, request):
    raise NotImplementedError()
  ExecuteBatchShards.async = None
  @abc.abstractmethod
  def ExecuteBatchKeyspaceIds(self, request):
    raise NotImplementedError()
  ExecuteBatchKeyspaceIds.async = None
  @abc.abstractmethod
  def StreamExecute(self, request):
    raise NotImplementedError()
  StreamExecute.async = None
  @abc.abstractmethod
  def StreamExecuteShards(self, request):
    raise NotImplementedError()
  StreamExecuteShards.async = None
  @abc.abstractmethod
  def StreamExecuteKeyspaceIds(self, request):
    raise NotImplementedError()
  StreamExecuteKeyspaceIds.async = None
  @abc.abstractmethod
  def StreamExecuteKeyRanges(self, request):
    raise NotImplementedError()
  StreamExecuteKeyRanges.async = None
  @abc.abstractmethod
  def Begin(self, request):
    raise NotImplementedError()
  Begin.async = None
  @abc.abstractmethod
  def Commit(self, request):
    raise NotImplementedError()
  Commit.async = None
  @abc.abstractmethod
  def Rollback(self, request):
    raise NotImplementedError()
  Rollback.async = None
  @abc.abstractmethod
  def SplitQuery(self, request):
    raise NotImplementedError()
  SplitQuery.async = None
def early_adopter_create_Vitess_server(servicer, port, private_key=None, certificate_chain=None):
  """Create an early-adopter gRPC server exposing the vtgateservice.Vitess service.

  Args:
    servicer: object implementing the Vitess methods (see
      EarlyAdopterVitessServicer).
    port: TCP port the server should listen on.
    private_key: PEM-encoded private key for TLS, or None.
    certificate_chain: PEM-encoded certificate chain for TLS, or None.

  Returns:
    A grpc.early_adopter server wired with service descriptions for all
    fifteen Vitess methods (unary-unary and unary-stream).
  """
  # The code generator emitted one redundant 'import vtgate_pb2' per method
  # reference (30 copies); a single import is sufficient and equivalent,
  # since repeated imports of an already-loaded module are no-ops.
  import vtgate_pb2
  method_service_descriptions = {
      "Begin": utilities.unary_unary_service_description(
          servicer.Begin,
          vtgate_pb2.BeginRequest.FromString,
          vtgate_pb2.BeginResponse.SerializeToString,
      ),
      "Commit": utilities.unary_unary_service_description(
          servicer.Commit,
          vtgate_pb2.CommitRequest.FromString,
          vtgate_pb2.CommitResponse.SerializeToString,
      ),
      "Execute": utilities.unary_unary_service_description(
          servicer.Execute,
          vtgate_pb2.ExecuteRequest.FromString,
          vtgate_pb2.ExecuteResponse.SerializeToString,
      ),
      "ExecuteBatchKeyspaceIds": utilities.unary_unary_service_description(
          servicer.ExecuteBatchKeyspaceIds,
          vtgate_pb2.ExecuteBatchKeyspaceIdsRequest.FromString,
          vtgate_pb2.ExecuteBatchKeyspaceIdsResponse.SerializeToString,
      ),
      "ExecuteBatchShards": utilities.unary_unary_service_description(
          servicer.ExecuteBatchShards,
          vtgate_pb2.ExecuteBatchShardsRequest.FromString,
          vtgate_pb2.ExecuteBatchShardsResponse.SerializeToString,
      ),
      "ExecuteEntityIds": utilities.unary_unary_service_description(
          servicer.ExecuteEntityIds,
          vtgate_pb2.ExecuteEntityIdsRequest.FromString,
          vtgate_pb2.ExecuteEntityIdsResponse.SerializeToString,
      ),
      "ExecuteKeyRanges": utilities.unary_unary_service_description(
          servicer.ExecuteKeyRanges,
          vtgate_pb2.ExecuteKeyRangesRequest.FromString,
          vtgate_pb2.ExecuteKeyRangesResponse.SerializeToString,
      ),
      "ExecuteKeyspaceIds": utilities.unary_unary_service_description(
          servicer.ExecuteKeyspaceIds,
          vtgate_pb2.ExecuteKeyspaceIdsRequest.FromString,
          vtgate_pb2.ExecuteKeyspaceIdsResponse.SerializeToString,
      ),
      "ExecuteShards": utilities.unary_unary_service_description(
          servicer.ExecuteShards,
          vtgate_pb2.ExecuteShardsRequest.FromString,
          vtgate_pb2.ExecuteShardsResponse.SerializeToString,
      ),
      "Rollback": utilities.unary_unary_service_description(
          servicer.Rollback,
          vtgate_pb2.RollbackRequest.FromString,
          vtgate_pb2.RollbackResponse.SerializeToString,
      ),
      "SplitQuery": utilities.unary_unary_service_description(
          servicer.SplitQuery,
          vtgate_pb2.SplitQueryRequest.FromString,
          vtgate_pb2.SplitQueryResponse.SerializeToString,
      ),
      "StreamExecute": utilities.unary_stream_service_description(
          servicer.StreamExecute,
          vtgate_pb2.StreamExecuteRequest.FromString,
          vtgate_pb2.StreamExecuteResponse.SerializeToString,
      ),
      "StreamExecuteKeyRanges": utilities.unary_stream_service_description(
          servicer.StreamExecuteKeyRanges,
          vtgate_pb2.StreamExecuteKeyRangesRequest.FromString,
          vtgate_pb2.StreamExecuteKeyRangesResponse.SerializeToString,
      ),
      "StreamExecuteKeyspaceIds": utilities.unary_stream_service_description(
          servicer.StreamExecuteKeyspaceIds,
          vtgate_pb2.StreamExecuteKeyspaceIdsRequest.FromString,
          vtgate_pb2.StreamExecuteKeyspaceIdsResponse.SerializeToString,
      ),
      "StreamExecuteShards": utilities.unary_stream_service_description(
          servicer.StreamExecuteShards,
          vtgate_pb2.StreamExecuteShardsRequest.FromString,
          vtgate_pb2.StreamExecuteShardsResponse.SerializeToString,
      ),
  }
  return implementations.server("vtgateservice.Vitess", method_service_descriptions, port, private_key=private_key, certificate_chain=certificate_chain)
def early_adopter_create_Vitess_stub(host, port, metadata_transformer=None, secure=False, root_certificates=None, private_key=None, certificate_chain=None, server_host_override=None):
  """Create an early-adopter gRPC client stub for the vtgateservice.Vitess service.

  Args:
    host: server hostname or address.
    port: server port.
    metadata_transformer: optional callable applied to call metadata.
    secure: whether to use a TLS channel.
    root_certificates: PEM-encoded root certificates, or None.
    private_key: PEM-encoded client private key, or None.
    certificate_chain: PEM-encoded client certificate chain, or None.
    server_host_override: override for TLS server-name checking, or None.

  Returns:
    A grpc.early_adopter stub wired with invocation descriptions for all
    fifteen Vitess methods (unary-unary and unary-stream).
  """
  # The code generator emitted one redundant 'import vtgate_pb2' per method
  # reference (30 copies); a single import is sufficient and equivalent,
  # since repeated imports of an already-loaded module are no-ops.
  import vtgate_pb2
  method_invocation_descriptions = {
      "Begin": utilities.unary_unary_invocation_description(
          vtgate_pb2.BeginRequest.SerializeToString,
          vtgate_pb2.BeginResponse.FromString,
      ),
      "Commit": utilities.unary_unary_invocation_description(
          vtgate_pb2.CommitRequest.SerializeToString,
          vtgate_pb2.CommitResponse.FromString,
      ),
      "Execute": utilities.unary_unary_invocation_description(
          vtgate_pb2.ExecuteRequest.SerializeToString,
          vtgate_pb2.ExecuteResponse.FromString,
      ),
      "ExecuteBatchKeyspaceIds": utilities.unary_unary_invocation_description(
          vtgate_pb2.ExecuteBatchKeyspaceIdsRequest.SerializeToString,
          vtgate_pb2.ExecuteBatchKeyspaceIdsResponse.FromString,
      ),
      "ExecuteBatchShards": utilities.unary_unary_invocation_description(
          vtgate_pb2.ExecuteBatchShardsRequest.SerializeToString,
          vtgate_pb2.ExecuteBatchShardsResponse.FromString,
      ),
      "ExecuteEntityIds": utilities.unary_unary_invocation_description(
          vtgate_pb2.ExecuteEntityIdsRequest.SerializeToString,
          vtgate_pb2.ExecuteEntityIdsResponse.FromString,
      ),
      "ExecuteKeyRanges": utilities.unary_unary_invocation_description(
          vtgate_pb2.ExecuteKeyRangesRequest.SerializeToString,
          vtgate_pb2.ExecuteKeyRangesResponse.FromString,
      ),
      "ExecuteKeyspaceIds": utilities.unary_unary_invocation_description(
          vtgate_pb2.ExecuteKeyspaceIdsRequest.SerializeToString,
          vtgate_pb2.ExecuteKeyspaceIdsResponse.FromString,
      ),
      "ExecuteShards": utilities.unary_unary_invocation_description(
          vtgate_pb2.ExecuteShardsRequest.SerializeToString,
          vtgate_pb2.ExecuteShardsResponse.FromString,
      ),
      "Rollback": utilities.unary_unary_invocation_description(
          vtgate_pb2.RollbackRequest.SerializeToString,
          vtgate_pb2.RollbackResponse.FromString,
      ),
      "SplitQuery": utilities.unary_unary_invocation_description(
          vtgate_pb2.SplitQueryRequest.SerializeToString,
          vtgate_pb2.SplitQueryResponse.FromString,
      ),
      "StreamExecute": utilities.unary_stream_invocation_description(
          vtgate_pb2.StreamExecuteRequest.SerializeToString,
          vtgate_pb2.StreamExecuteResponse.FromString,
      ),
      "StreamExecuteKeyRanges": utilities.unary_stream_invocation_description(
          vtgate_pb2.StreamExecuteKeyRangesRequest.SerializeToString,
          vtgate_pb2.StreamExecuteKeyRangesResponse.FromString,
      ),
      "StreamExecuteKeyspaceIds": utilities.unary_stream_invocation_description(
          vtgate_pb2.StreamExecuteKeyspaceIdsRequest.SerializeToString,
          vtgate_pb2.StreamExecuteKeyspaceIdsResponse.FromString,
      ),
      "StreamExecuteShards": utilities.unary_stream_invocation_description(
          vtgate_pb2.StreamExecuteShardsRequest.SerializeToString,
          vtgate_pb2.StreamExecuteShardsResponse.FromString,
      ),
  }
  return implementations.stub("vtgateservice.Vitess", method_invocation_descriptions, host, port, metadata_transformer=metadata_transformer, secure=secure, root_certificates=root_certificates, private_key=private_key, certificate_chain=certificate_chain, server_host_override=server_host_override)

175
py/vtproto/vtrpc_pb2.py Normal file
Просмотреть файл

@ -0,0 +1,175 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: vtrpc.proto
#
# NOTE(review): protoc output; regenerate via the Makefile 'proto' target
# rather than editing by hand.
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


# File descriptor for vtrpc.proto; serialized_pb is the wire-format
# FileDescriptorProto emitted by protoc.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='vtrpc.proto',
  package='vtrpc',
  syntax='proto3',
  serialized_pb=_b('\n\x0bvtrpc.proto\x12\x05vtrpc\"F\n\x08\x43\x61llerID\x12\x11\n\tprincipal\x18\x01 \x01(\t\x12\x11\n\tcomponent\x18\x02 \x01(\t\x12\x14\n\x0csubcomponent\x18\x03 \x01(\t\";\n\x08RPCError\x12\x1e\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x10.vtrpc.ErrorCode\x12\x0f\n\x07message\x18\x02 \x01(\t*n\n\tErrorCode\x12\x0b\n\x07NoError\x10\x00\x12\x10\n\x0bTabletError\x10\xe8\x07\x12\x17\n\x12UnknownTabletError\x10\xcf\x0f\x12\x10\n\x0bVtgateError\x10\xd0\x0f\x12\x17\n\x12UnknownVtgateError\x10\xb7\x17\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Descriptor for enum vtrpc.ErrorCode; error numbers are grouped in ranges
# (1000-1999 tablet, 2000-2999 vtgate) per the values below.
_ERRORCODE = _descriptor.EnumDescriptor(
  name='ErrorCode',
  full_name='vtrpc.ErrorCode',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='NoError', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TabletError', index=1, number=1000,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='UnknownTabletError', index=2, number=1999,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='VtgateError', index=3, number=2000,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='UnknownVtgateError', index=4, number=2999,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=155,
  serialized_end=265,
)
_sym_db.RegisterEnumDescriptor(_ERRORCODE)

# Module-level enum wrapper and value constants mirroring vtrpc.ErrorCode.
ErrorCode = enum_type_wrapper.EnumTypeWrapper(_ERRORCODE)
NoError = 0
TabletError = 1000
UnknownTabletError = 1999
VtgateError = 2000
UnknownVtgateError = 2999


# Descriptor for message vtrpc.CallerID
# (fields: principal, component, subcomponent).
_CALLERID = _descriptor.Descriptor(
  name='CallerID',
  full_name='vtrpc.CallerID',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='principal', full_name='vtrpc.CallerID.principal', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='component', full_name='vtrpc.CallerID.component', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='subcomponent', full_name='vtrpc.CallerID.subcomponent', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=22,
  serialized_end=92,
)


# Descriptor for message vtrpc.RPCError (fields: code, message).
_RPCERROR = _descriptor.Descriptor(
  name='RPCError',
  full_name='vtrpc.RPCError',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='code', full_name='vtrpc.RPCError.code', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='message', full_name='vtrpc.RPCError.message', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=94,
  serialized_end=153,
)

# Cross-link the enum-typed field and register all types on the file
# descriptor, then build and register the concrete message classes.
_RPCERROR.fields_by_name['code'].enum_type = _ERRORCODE
DESCRIPTOR.message_types_by_name['CallerID'] = _CALLERID
DESCRIPTOR.message_types_by_name['RPCError'] = _RPCERROR
DESCRIPTOR.enum_types_by_name['ErrorCode'] = _ERRORCODE

CallerID = _reflection.GeneratedProtocolMessageType('CallerID', (_message.Message,), dict(
  DESCRIPTOR = _CALLERID,
  __module__ = 'vtrpc_pb2'
  # @@protoc_insertion_point(class_scope:vtrpc.CallerID)
  ))
_sym_db.RegisterMessage(CallerID)

RPCError = _reflection.GeneratedProtocolMessageType('RPCError', (_message.Message,), dict(
  DESCRIPTOR = _RPCERROR,
  __module__ = 'vtrpc_pb2'
  # @@protoc_insertion_point(class_scope:vtrpc.RPCError)
  ))
_sym_db.RegisterMessage(RPCError)

# NOTE(review): the gRPC plugin emits these imports unconditionally; they
# appear unused in this module (vtrpc.proto declares no services).
import abc
from grpc.early_adopter import implementations
from grpc.framework.alpha import utilities
# @@protoc_insertion_point(module_scope)

Просмотреть файл

@ -0,0 +1,114 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: vtworkerdata.proto
# NOTE(review): the comments below are annotations only; this module must be
# regenerated with protoc, never edited by hand.
import sys
# _b: identity on python2, latin1-encode on python3, so the serialized
# descriptor literal below is raw bytes on both interpreter versions.
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
# vtworkerdata.proto depends on logutil.proto (for logutil.Event below).
import logutil_pb2 as logutil__pb2
# File-level descriptor reconstructed from the serialized vtworkerdata.proto.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='vtworkerdata.proto',
  package='vtworkerdata',
  syntax='proto3',
  serialized_pb=_b('\n\x12vtworkerdata.proto\x12\x0cvtworkerdata\x1a\rlogutil.proto\"-\n\x1d\x45xecuteVtworkerCommandRequest\x12\x0c\n\x04\x61rgs\x18\x01 \x03(\t\"?\n\x1e\x45xecuteVtworkerCommandResponse\x12\x1d\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x0e.logutil.Eventb\x06proto3')
  ,
  dependencies=[logutil__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# ExecuteVtworkerCommandRequest: one repeated string field 'args' (field #1),
# i.e. the vtworker command line to execute.
_EXECUTEVTWORKERCOMMANDREQUEST = _descriptor.Descriptor(
  name='ExecuteVtworkerCommandRequest',
  full_name='vtworkerdata.ExecuteVtworkerCommandRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='args', full_name='vtworkerdata.ExecuteVtworkerCommandRequest.args', index=0,
      number=1, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=51,
  serialized_end=96,
)
# ExecuteVtworkerCommandResponse: one message field 'event' (field #1) whose
# type (logutil.Event) is wired up after both descriptors exist.
_EXECUTEVTWORKERCOMMANDRESPONSE = _descriptor.Descriptor(
  name='ExecuteVtworkerCommandResponse',
  full_name='vtworkerdata.ExecuteVtworkerCommandResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='event', full_name='vtworkerdata.ExecuteVtworkerCommandResponse.event', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=98,
  serialized_end=161,
)
# Resolve the cross-file reference: response.event is a logutil.Event.
_EXECUTEVTWORKERCOMMANDRESPONSE.fields_by_name['event'].message_type = logutil__pb2._EVENT
DESCRIPTOR.message_types_by_name['ExecuteVtworkerCommandRequest'] = _EXECUTEVTWORKERCOMMANDREQUEST
DESCRIPTOR.message_types_by_name['ExecuteVtworkerCommandResponse'] = _EXECUTEVTWORKERCOMMANDRESPONSE
# Concrete message classes are built via the reflection metaclass and
# registered with the default symbol database.
ExecuteVtworkerCommandRequest = _reflection.GeneratedProtocolMessageType('ExecuteVtworkerCommandRequest', (_message.Message,), dict(
  DESCRIPTOR = _EXECUTEVTWORKERCOMMANDREQUEST,
  __module__ = 'vtworkerdata_pb2'
  # @@protoc_insertion_point(class_scope:vtworkerdata.ExecuteVtworkerCommandRequest)
  ))
_sym_db.RegisterMessage(ExecuteVtworkerCommandRequest)
ExecuteVtworkerCommandResponse = _reflection.GeneratedProtocolMessageType('ExecuteVtworkerCommandResponse', (_message.Message,), dict(
  DESCRIPTOR = _EXECUTEVTWORKERCOMMANDRESPONSE,
  __module__ = 'vtworkerdata_pb2'
  # @@protoc_insertion_point(class_scope:vtworkerdata.ExecuteVtworkerCommandResponse)
  ))
_sym_db.RegisterMessage(ExecuteVtworkerCommandResponse)
# Imports appended by the gRPC python plugin; unused in this data-only module
# but emitted unconditionally by the generator.
import abc
from grpc.early_adopter import implementations
from grpc.framework.alpha import utilities
# @@protoc_insertion_point(module_scope)

Просмотреть файл

@ -0,0 +1,78 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: vtworkerservice.proto
# NOTE(review): the comments below are annotations only; this module must be
# regenerated with protoc, never edited by hand.
import sys
# _b: identity on python2, latin1-encode on python3, so the serialized
# descriptor literal below is raw bytes on both interpreter versions.
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
# Request/response message types live in vtworkerdata.proto.
import vtworkerdata_pb2 as vtworkerdata__pb2
# Service-only proto: defines Vtworker.ExecuteVtworkerCommand, a
# server-streaming RPC (one request, a stream of responses — see the
# unary_stream descriptions in the factories below).
DESCRIPTOR = _descriptor.FileDescriptor(
  name='vtworkerservice.proto',
  package='vtworkerservice',
  syntax='proto3',
  serialized_pb=_b('\n\x15vtworkerservice.proto\x12\x0fvtworkerservice\x1a\x12vtworkerdata.proto2\x83\x01\n\x08Vtworker\x12w\n\x16\x45xecuteVtworkerCommand\x12+.vtworkerdata.ExecuteVtworkerCommandRequest\x1a,.vtworkerdata.ExecuteVtworkerCommandResponse\"\x00\x30\x01\x62\x06proto3')
  ,
  dependencies=[vtworkerdata__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# gRPC early-adopter plumbing used by the servicer/stub classes and the
# factory functions that follow.
import abc
from grpc.early_adopter import implementations
from grpc.framework.alpha import utilities
class EarlyAdopterVtworkerServicer(object):
  """Abstract server-side interface for the vtworkerservice.Vtworker service.

  Implement ExecuteVtworkerCommand and pass the instance to
  early_adopter_create_Vtworker_server.
  """
  # py2-era ABC declaration emitted by the generator.
  __metaclass__ = abc.ABCMeta
  @abc.abstractmethod
  def ExecuteVtworkerCommand(self, request, context):
    # Handles one ExecuteVtworkerCommandRequest; registered as a
    # unary-stream method, so implementations stream responses back.
    raise NotImplementedError()
class EarlyAdopterVtworkerServer(object):
  """Abstract handle on a running Vtworker gRPC server (lifecycle only)."""
  # py2-era ABC declaration emitted by the generator.
  __metaclass__ = abc.ABCMeta
  @abc.abstractmethod
  def start(self):
    # Starts serving; concrete behavior is provided by the framework.
    raise NotImplementedError()
  @abc.abstractmethod
  def stop(self):
    # Stops serving.
    raise NotImplementedError()
class EarlyAdopterVtworkerStub(object):
  """Abstract client-side stub for vtworkerservice.Vtworker."""
  # py2-era ABC declaration emitted by the generator.
  __metaclass__ = abc.ABCMeta
  @abc.abstractmethod
  def ExecuteVtworkerCommand(self, request):
    # Invokes the server-streaming ExecuteVtworkerCommand RPC.
    raise NotImplementedError()
  # Attribute consumed by the early-adopter framework; presumably selects
  # sync vs async invocation mode — TODO(review): confirm against grpc 0.x.
  ExecuteVtworkerCommand.async = None
def early_adopter_create_Vtworker_server(servicer, port, private_key=None, certificate_chain=None):
  """Creates an early-adopter gRPC server for vtworkerservice.Vtworker.

  Args:
    servicer: an EarlyAdopterVtworkerServicer implementation.
    port: TCP port to listen on.
    private_key: PEM-encoded private key for TLS, or None for insecure.
    certificate_chain: PEM-encoded certificate chain for TLS, or None.

  Returns:
    The framework server object wrapping the servicer's methods.
  """
  # The generator emitted this import once per message type (twice); a
  # single import is sufficient.
  import vtworkerdata_pb2
  method_service_descriptions = {
      # ExecuteVtworkerCommand is unary-stream: one request in, a stream of
      # responses out.
      "ExecuteVtworkerCommand": utilities.unary_stream_service_description(
          servicer.ExecuteVtworkerCommand,
          vtworkerdata_pb2.ExecuteVtworkerCommandRequest.FromString,
          vtworkerdata_pb2.ExecuteVtworkerCommandResponse.SerializeToString,
      ),
  }
  return implementations.server("vtworkerservice.Vtworker", method_service_descriptions, port, private_key=private_key, certificate_chain=certificate_chain)
def early_adopter_create_Vtworker_stub(host, port, metadata_transformer=None, secure=False, root_certificates=None, private_key=None, certificate_chain=None, server_host_override=None):
  """Creates an early-adopter gRPC client stub for vtworkerservice.Vtworker.

  Args:
    host: server host to connect to.
    port: server TCP port.
    metadata_transformer: optional callable applied to outgoing metadata.
    secure: whether to use TLS.
    root_certificates: PEM-encoded root certificates, or None.
    private_key: PEM-encoded client private key, or None.
    certificate_chain: PEM-encoded client certificate chain, or None.
    server_host_override: hostname override for TLS verification, or None.

  Returns:
    The framework stub object exposing ExecuteVtworkerCommand.
  """
  # The generator emitted this import once per message type (twice); a
  # single import is sufficient.
  import vtworkerdata_pb2
  method_invocation_descriptions = {
      # ExecuteVtworkerCommand is unary-stream: one request in, a stream of
      # responses out.
      "ExecuteVtworkerCommand": utilities.unary_stream_invocation_description(
          vtworkerdata_pb2.ExecuteVtworkerCommandRequest.SerializeToString,
          vtworkerdata_pb2.ExecuteVtworkerCommandResponse.FromString,
      ),
  }
  return implementations.stub("vtworkerservice.Vtworker", method_invocation_descriptions, host, port, metadata_transformer=metadata_transformer, secure=secure, root_certificates=root_certificates, private_key=private_key, certificate_chain=certificate_chain, server_host_override=server_host_override)
# @@protoc_insertion_point(module_scope)

Просмотреть файл

@ -18,7 +18,7 @@ import utils
from mysql_flavor import mysql_flavor
from vtdb import keyrange_constants
from vtdb import update_stream_service
from vtdb import update_stream
src_master = tablet.Tablet()
src_replica = tablet.Tablet()
@ -146,8 +146,8 @@ def tearDownModule():
def _get_update_stream(tblt):
return update_stream_service.UpdateStreamConnection('localhost:%u' %
tblt.port, 30)
protocol, endpoint = tblt.update_stream_python_endpoint()
return update_stream.connect(protocol, endpoint, 30)
class TestBinlog(unittest.TestCase):
@ -168,13 +168,9 @@ class TestBinlog(unittest.TestCase):
# Wait for it to replicate.
stream = _get_update_stream(dst_replica)
stream.dial()
data = stream.stream_start(start_position)
while data:
if data['Category'] == 'POS':
for stream_event in stream.stream_update(start_position):
if stream_event.category == update_stream.StreamEvent.POS:
break
data = stream.stream_next()
stream.close()
# Check the value.
data = dst_master.mquery("vt_test_keyspace",
@ -202,16 +198,13 @@ class TestBinlog(unittest.TestCase):
# Look for it using update stream to see if binlog streamer can talk to
# dst_replica, which now has binlog_checksum enabled.
stream = _get_update_stream(dst_replica)
stream.dial()
data = stream.stream_start(start_position)
found = False
while data:
if data['Category'] == 'POS':
for stream_event in stream.stream_update(start_position):
if stream_event.category == update_stream.StreamEvent.POS:
break
if data['Sql'] == sql:
if stream_event.sql == sql:
found = True
break
data = stream.stream_next()
stream.close()
self.assertEqual(found, True, 'expected query not found in update stream')
@ -233,16 +226,13 @@ class TestBinlog(unittest.TestCase):
# Look for it using update stream to see if binlog streamer can talk to
# dst_replica, which now has binlog_checksum disabled.
stream = _get_update_stream(dst_replica)
stream.dial()
data = stream.stream_start(start_position)
found = False
while data:
if data['Category'] == 'POS':
for stream_event in stream.stream_update(start_position):
if stream_event.category == update_stream.StreamEvent.POS:
break
if data['Sql'] == sql:
if stream_event.sql == sql:
found = True
break
data = stream.stream_next()
stream.close()
self.assertEqual(found, True, 'expected query not found in update stream')

Просмотреть файл

@ -8,6 +8,9 @@ class GoRpcProtocolsFlavor(protocols_flavor.ProtocolsFlavor):
def binlog_player_protocol(self):
return 'gorpc'
def binlog_player_python_protocol(self):
  """Python update_stream client protocol for the go-rpc flavor."""
  protocol_name = 'gorpc'
  return protocol_name
def vtctl_client_protocol(self):
return 'gorpc'

Просмотреть файл

@ -9,6 +9,9 @@ class GRpcProtocolsFlavor(protocols_flavor.ProtocolsFlavor):
def binlog_player_protocol(self):
return 'grpc'
def binlog_player_python_protocol(self):
  """Python update_stream client protocol for the gRPC flavor."""
  protocol_name = 'grpc'
  return protocol_name
def vtctl_client_protocol(self):
return 'grpc'

Просмотреть файл

@ -6,7 +6,13 @@ class ProtocolsFlavor(object):
"""Base class for protocols"""
def binlog_player_protocol(self):
"""Returns the name of the binlog player protocol to use."""
"""Returns the name of the binlog player protocol to use
between vttablets, in go."""
raise NotImplementedError('Not implemented in the base class')
def binlog_player_python_protocol(self):
  """Returns the name of the binlog player protocol to use
  for python connections to update_stream service.

  Raises:
    NotImplementedError: always; protocol flavors must override this.
  """
  raise NotImplementedError('Not implemented in the base class')
def vtctl_client_protocol(self):

Просмотреть файл

@ -94,6 +94,17 @@ class Tablet(object):
self.zk_tablet_path = (
'/zk/test_%s/vt/tablets/%010d' % (self.cell, self.tablet_uid))
def update_stream_python_endpoint(self):
  """Returns (protocol, endpoint) for a python update_stream connection.

  The protocol-specific vtdb module is imported for its side effects —
  presumably registering the implementation with vtdb.update_stream so
  update_stream.connect(protocol, ...) can find it; TODO(review): confirm.
  The imported names are intentionally unused here.
  """
  protocol = protocols_flavor().binlog_player_python_protocol()
  port = self.port
  if protocol == "gorpc":
    from vtdb import gorpc_update_stream
  elif protocol == "grpc":
    # import the grpc update stream client implementation, change the port
    from vtdb import grpc_update_stream
    port = self.grpc_port
  return (protocol, 'localhost:%u' % port)
def mysqlctl(self, cmd, extra_my_cnf=None, with_ports=False, verbose=False):
extra_env = {}
all_extra_my_cnf = get_all_extra_my_cnf(extra_my_cnf)

Просмотреть файл

@ -19,7 +19,7 @@ import tablet
import utils
from vtdb import dbexceptions
from vtdb import topology
from vtdb import update_stream_service
from vtdb import update_stream
from vtdb import vtclient
from zk import zkocc
from mysql_flavor import mysql_flavor
@ -151,11 +151,12 @@ class TestUpdateStream(unittest.TestCase):
self.vtgate_client.close()
def _get_master_stream_conn(self):
return update_stream_service.UpdateStreamConnection(master_host, 30)
protocol, endpoint = master_tablet.update_stream_python_endpoint()
return update_stream.connect(protocol, endpoint, 30)
def _get_replica_stream_conn(self):
return update_stream_service.UpdateStreamConnection('localhost:%u' %
replica_tablet.port, 30)
protocol, endpoint = replica_tablet.update_stream_python_endpoint()
return update_stream.connect(protocol, endpoint, 30)
def _test_service_disabled(self):
start_position = _get_repl_current_position()
@ -164,18 +165,14 @@ class TestUpdateStream(unittest.TestCase):
self._exec_vt_txn(['delete from vt_insert_test'])
utils.run_vtctl(['ChangeSlaveType', replica_tablet.tablet_alias, 'spare'])
utils.wait_for_tablet_type(replica_tablet.tablet_alias, 'spare')
replica_conn = self._get_replica_stream_conn()
logging.debug('dialing replica update stream service')
replica_conn.dial()
replica_conn = self._get_replica_stream_conn()
try:
data = replica_conn.stream_start(start_position)
for stream_event in replica_conn.stream_update(start_position):
break
except Exception, e:
logging.debug(str(e))
if str(e) == 'update stream service is not enabled':
logging.debug('Test Service Disabled: Pass')
else:
self.fail(
'Test Service Disabled: Fail - did not throw the correct exception')
self.assertIn('update stream service is not enabled', str(e))
v = utils.get_vars(replica_tablet.port)
if v['UpdateStreamState'] != 'Disabled':
@ -198,13 +195,10 @@ class TestUpdateStream(unittest.TestCase):
thd.daemon = True
thd.start()
replica_conn = self._get_replica_stream_conn()
replica_conn.dial()
try:
data = replica_conn.stream_start(start_position)
for i in xrange(10):
data = replica_conn.stream_next()
if data['Category'] == 'DML' and utils.options.verbose == 2:
for stream_event in replica_conn.stream_update(start_position):
if stream_event.category == update_stream.StreamEvent.DML:
logging.debug('Test Service Enabled: Pass')
break
except Exception, e:
@ -222,19 +216,19 @@ class TestUpdateStream(unittest.TestCase):
logging.debug('Testing enable -> disable switch starting @ %s',
start_position)
replica_conn = self._get_replica_stream_conn()
replica_conn.dial()
disabled_err = False
first = True
txn_count = 0
try:
data = replica_conn.stream_start(start_position)
utils.run_vtctl(['ChangeSlaveType', replica_tablet.tablet_alias, 'spare'])
utils.wait_for_tablet_type(replica_tablet.tablet_alias, 'spare', 30)
while data:
data = replica_conn.stream_next()
if data is not None and data['Category'] == 'POS':
txn_count += 1
logging.debug('Test Service Switch: FAIL')
return
for stream_event in replica_conn.stream_update(start_position):
if first:
utils.run_vtctl(['ChangeSlaveType', replica_tablet.tablet_alias, 'spare'])
utils.wait_for_tablet_type(replica_tablet.tablet_alias, 'spare', 30)
first = False
else:
if stream_event.category == update_stream.StreamEvent.POS:
txn_count += 1
logging.debug('Test Service Switch: FAIL')
return
except dbexceptions.DatabaseError, e:
self.assertEqual(
'Fatal Service Error: Disconnecting because the Update Stream '
@ -246,27 +240,24 @@ class TestUpdateStream(unittest.TestCase):
self.fail("Update stream returned error '%s'" % str(e))
logging.debug('Streamed %d transactions before exiting', txn_count)
def _vtdb_conn(self, host):
conn = vtclient.VtOCCConnection(self.vtgate_client, 'test_keyspace', '0',
'master', 30)
conn.connect()
return conn
def _exec_vt_txn(self, query_list=None):
if not query_list:
return
vtdb_conn = self._vtdb_conn('localhost:%u' % master_tablet.port)
vtdb_conn = vtclient.VtOCCConnection(self.vtgate_client,
'test_keyspace', '0', 'master', 30)
vtdb_conn.connect()
vtdb_cursor = vtdb_conn.cursor()
vtdb_conn.begin()
for q in query_list:
vtdb_cursor.execute(q, {})
vtdb_conn.commit()
# The function below checks the parity of streams received
# from master and replica for the same writes. Also tests
# transactions are retrieved properly.
def test_stream_parity(self):
timeout = 30#s
"""test_stream_parity checks the parity of streams received
from master and replica for the same writes. Also tests
transactions are retrieved properly.
"""
timeout = 30
while True:
master_start_position = _get_master_current_position()
replica_start_position = _get_repl_current_position()
@ -285,25 +276,17 @@ class TestUpdateStream(unittest.TestCase):
self._exec_vt_txn(['delete from vt_a'])
self._exec_vt_txn(['delete from vt_b'])
master_conn = self._get_master_stream_conn()
master_conn.dial()
master_events = []
data = master_conn.stream_start(master_start_position)
master_events.append(data)
for i in xrange(21):
data = master_conn.stream_next()
master_events.append(data)
if data['Category'] == 'POS':
for stream_event in master_conn.stream_update(master_start_position):
master_events.append(stream_event)
if stream_event.category == update_stream.StreamEvent.POS:
master_txn_count += 1
break
replica_events = []
replica_conn = self._get_replica_stream_conn()
replica_conn.dial()
data = replica_conn.stream_start(replica_start_position)
replica_events.append(data)
for i in xrange(21):
data = replica_conn.stream_next()
replica_events.append(data)
if data['Category'] == 'POS':
for stream_event in replica_conn.stream_update(replica_start_position):
replica_events.append(stream_event)
if stream_event.category == update_stream.StreamEvent.POS:
replica_txn_count += 1
break
if len(master_events) != len(replica_events):
@ -311,8 +294,8 @@ class TestUpdateStream(unittest.TestCase):
'Test Failed - # of records mismatch, master %s replica %s',
master_events, replica_events)
for master_val, replica_val in zip(master_events, replica_events):
master_data = master_val
replica_data = replica_val
master_data = master_val.__dict__
replica_data = replica_val.__dict__
self.assertEqual(
master_data, replica_data,
"Test failed, data mismatch - master '%s' and replica position '%s'" %
@ -324,28 +307,26 @@ class TestUpdateStream(unittest.TestCase):
start_position = master_start_position
logging.debug('test_ddl: starting @ %s', start_position)
master_conn = self._get_master_stream_conn()
master_conn.dial()
data = master_conn.stream_start(start_position)
self.assertEqual(data['Sql'], _create_vt_insert_test,
"DDL didn't match original")
for stream_event in master_conn.stream_update(start_position):
self.assertEqual(stream_event.sql, _create_vt_insert_test,
"DDL didn't match original")
return
self.fail("didn't get right sql")
def test_set_insert_id(self):
start_position = _get_master_current_position()
self._exec_vt_txn(['SET INSERT_ID=1000000'] + self._populate_vt_insert_test)
logging.debug('test_set_insert_id: starting @ %s', start_position)
master_conn = self._get_master_stream_conn()
master_conn.dial()
data = master_conn.stream_start(start_position)
expected_id = 1000000
while data:
if data['Category'] == 'POS':
for stream_event in master_conn.stream_update(start_position):
if stream_event.category == update_stream.StreamEvent.POS:
break
self.assertEqual(data['Fields'][0], 'id')
self.assertEqual(data['PkRows'][0][0], expected_id)
self.assertEqual(stream_event.fields[0], 'id')
self.assertEqual(stream_event.rows[0][0], expected_id)
expected_id += 1
data = master_conn.stream_next()
if expected_id != 1000004:
self.fail("did not get my foru values!")
self.fail("did not get my four values!")
def test_database_filter(self):
start_position = _get_master_current_position()
@ -353,18 +334,15 @@ class TestUpdateStream(unittest.TestCase):
self._exec_vt_txn(self._populate_vt_insert_test)
logging.debug('test_database_filter: starting @ %s', start_position)
master_conn = self._get_master_stream_conn()
master_conn.dial()
data = master_conn.stream_start(start_position)
while data:
if data['Category'] == 'POS':
for stream_event in master_conn.stream_update(start_position):
if stream_event.category == update_stream.StreamEvent.POS:
break
self.assertNotEqual(
data['Category'], 'DDL',
stream_event.category, update_stream.StreamEvent.DDL,
"query using other_database wasn't filted out")
data = master_conn.stream_next()
# This tests the service switch from disable -> enable -> disable
def test_service_switch(self):
"""tests the service switch from disable -> enable -> disable"""
self._test_service_disabled()
self._test_service_enabled()
# The above tests leaves the service in disabled state, hence enabling it.
@ -378,19 +356,18 @@ class TestUpdateStream(unittest.TestCase):
self._exec_vt_txn(self._populate_vt_a(15))
self._exec_vt_txn(['delete from vt_a'])
master_conn = self._get_master_stream_conn()
master_conn.dial()
data = master_conn.stream_start(start_position)
master_txn_count = 0
logs_correct = False
while master_txn_count <= 2:
data = master_conn.stream_next()
if data['Category'] == 'POS':
for stream_event in master_conn.stream_update(start_position):
if stream_event.category == update_stream.StreamEvent.POS:
master_txn_count += 1
position = mysql_flavor().position_append(position, data['GTIDField'])
position = mysql_flavor().position_append(position, stream_event.position)
if mysql_flavor().position_after(position, start_position):
logs_correct = True
logging.debug('Log rotation correctly interpreted')
break
if master_txn_count == 2:
self.fail("ran out of logs")
if not logs_correct:
self.fail("Flush logs didn't get properly interpreted")

Просмотреть файл

@ -593,7 +593,7 @@ def run_vtctl(clargs, auto_log=False, expect_fail=False,
return result, ""
elif mode == VTCTL_RPC:
logging.debug("vtctl: %s", " ".join(clargs))
result = vtctld_connection.execute_vtctl_command(clargs, info_to_debug=True, action_timeout=120)
result = vtctl_client.execute_vtctl_command(vtctld_connection, clargs, info_to_debug=True, action_timeout=120)
return result, ""
raise Exception('Unknown mode: %s', mode)