// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: BulkDelete.proto
package org.apache.hadoop.hbase.coprocessor.example.generated;
public final class BulkDeleteProtos {
private BulkDeleteProtos() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
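// BulkDeleteProtos is the outer wrapper class protoc emits for
// BulkDelete.proto: the BulkDeleteRequest/BulkDeleteResponse messages and
// the BulkDeleteService RPC definition are nested inside it, and the
// file-level descriptor data sits in the static block at the bottom.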
public interface BulkDeleteRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required .Scan scan = 1;
boolean hasScan();
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();
// required .BulkDeleteRequest.DeleteType deleteType = 2;
boolean hasDeleteType();
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType();
// optional uint64 timestamp = 3;
boolean hasTimestamp();
long getTimestamp();
// required uint32 rowBatchSize = 4;
boolean hasRowBatchSize();
int getRowBatchSize();
}
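// For the optional scalar field above, the getter alone cannot distinguish
// "absent" from "set to its default", so callers should consult has*()
// first. An illustrative sketch, assuming a parsed `request` instance:
//
//   if (request.hasTimestamp()) {
//     long ts = request.getTimestamp();
//   }
//
// getTimestamp() would return 0L even if the field were never set.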
public static final class BulkDeleteRequest extends
com.google.protobuf.GeneratedMessage
implements BulkDeleteRequestOrBuilder {
// Use BulkDeleteRequest.newBuilder() to construct.
private BulkDeleteRequest(Builder builder) {
super(builder);
}
private BulkDeleteRequest(boolean noInit) {}
private static final BulkDeleteRequest defaultInstance;
public static BulkDeleteRequest getDefaultInstance() {
return defaultInstance;
}
public BulkDeleteRequest getDefaultInstanceForType() {
return defaultInstance;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteRequest_fieldAccessorTable;
}
public enum DeleteType
implements com.google.protobuf.ProtocolMessageEnum {
ROW(0, 0),
FAMILY(1, 1),
COLUMN(2, 2),
VERSION(3, 3),
;
public static final int ROW_VALUE = 0;
public static final int FAMILY_VALUE = 1;
public static final int COLUMN_VALUE = 2;
public static final int VERSION_VALUE = 3;
public final int getNumber() { return value; }
public static DeleteType valueOf(int value) {
switch (value) {
case 0: return ROW;
case 1: return FAMILY;
case 2: return COLUMN;
case 3: return VERSION;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<DeleteType>
internalGetValueMap() {
return internalValueMap;
}
private static com.google.protobuf.Internal.EnumLiteMap<DeleteType>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<DeleteType>() {
public DeleteType findValueByNumber(int number) {
return DeleteType.valueOf(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDescriptor().getEnumTypes().get(0);
}
private static final DeleteType[] VALUES = {
ROW, FAMILY, COLUMN, VERSION,
};
public static DeleteType valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int index;
private final int value;
private DeleteType(int index, int value) {
this.index = index;
this.value = value;
}
// @@protoc_insertion_point(enum_scope:BulkDeleteRequest.DeleteType)
}
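// DeleteType.valueOf(int) maps a raw wire value back to a constant and
// returns null for anything outside 0..3, so decoders must null-check.
// Illustrative sketch:
//
//   BulkDeleteRequest.DeleteType t = BulkDeleteRequest.DeleteType.valueOf(2);
//   // t == COLUMN; valueOf(99) yields null, and the parser in this file
//   // shunts such unknown values into the message's unknown-field set.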
private int bitField0_;
// required .Scan scan = 1;
public static final int SCAN_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_;
public boolean hasScan() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
return scan_;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
return scan_;
}
// required .BulkDeleteRequest.DeleteType deleteType = 2;
public static final int DELETETYPE_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType deleteType_;
public boolean hasDeleteType() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType() {
return deleteType_;
}
// optional uint64 timestamp = 3;
public static final int TIMESTAMP_FIELD_NUMBER = 3;
private long timestamp_;
public boolean hasTimestamp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
public long getTimestamp() {
return timestamp_;
}
// required uint32 rowBatchSize = 4;
public static final int ROWBATCHSIZE_FIELD_NUMBER = 4;
private int rowBatchSize_;
public boolean hasRowBatchSize() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
public int getRowBatchSize() {
return rowBatchSize_;
}
private void initFields() {
scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW;
timestamp_ = 0L;
rowBatchSize_ = 0;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasScan()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasDeleteType()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasRowBatchSize()) {
memoizedIsInitialized = 0;
return false;
}
if (!getScan().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
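// isInitialized() above encodes the proto2 "required" rules for this
// message: scan, deleteType and rowBatchSize must be present (and the
// nested Scan must itself be initialized) before build() or parsing will
// accept the message; timestamp is optional. The result is memoized in
// memoizedIsInitialized so repeated checks stay cheap.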
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, scan_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeEnum(2, deleteType_.getNumber());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeUInt64(3, timestamp_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeUInt32(4, rowBatchSize_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, scan_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(2, deleteType_.getNumber());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(3, timestamp_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt32Size(4, rowBatchSize_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest other = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) obj;
boolean result = true;
result = result && (hasScan() == other.hasScan());
if (hasScan()) {
result = result && getScan()
.equals(other.getScan());
}
result = result && (hasDeleteType() == other.hasDeleteType());
if (hasDeleteType()) {
result = result &&
(getDeleteType() == other.getDeleteType());
}
result = result && (hasTimestamp() == other.hasTimestamp());
if (hasTimestamp()) {
result = result && (getTimestamp()
== other.getTimestamp());
}
result = result && (hasRowBatchSize() == other.hasRowBatchSize());
if (hasRowBatchSize()) {
result = result && (getRowBatchSize()
== other.getRowBatchSize());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
@java.lang.Override
public int hashCode() {
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasScan()) {
hash = (37 * hash) + SCAN_FIELD_NUMBER;
hash = (53 * hash) + getScan().hashCode();
}
if (hasDeleteType()) {
hash = (37 * hash) + DELETETYPE_FIELD_NUMBER;
hash = (53 * hash) + hashEnum(getDeleteType());
}
if (hasTimestamp()) {
hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getTimestamp());
}
if (hasRowBatchSize()) {
hash = (37 * hash) + ROWBATCHSIZE_FIELD_NUMBER;
hash = (53 * hash) + getRowBatchSize();
}
hash = (29 * hash) + getUnknownFields().hashCode();
return hash;
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
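// All of the parseFrom() overloads above funnel through a fresh Builder and
// buildParsed(), which converts a missing required field into an
// InvalidProtocolBufferException. A minimal round-trip sketch:
//
//   byte[] wire = request.toByteArray();
//   BulkDeleteRequest copy = BulkDeleteRequest.parseFrom(wire);
//   // copy.equals(request) holds; equals() compares field by field.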
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteRequest_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getScanFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
if (scanBuilder_ == null) {
scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
} else {
scanBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW;
bitField0_ = (bitField0_ & ~0x00000002);
timestamp_ = 0L;
bitField0_ = (bitField0_ & ~0x00000004);
rowBatchSize_ = 0;
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDescriptor();
}
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest getDefaultInstanceForType() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance();
}
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest build() {
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest buildPartial() {
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest result = new org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (scanBuilder_ == null) {
result.scan_ = scan_;
} else {
result.scan_ = scanBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.deleteType_ = deleteType_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.timestamp_ = timestamp_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.rowBatchSize_ = rowBatchSize_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) {
return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest other) {
if (other == org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance()) return this;
if (other.hasScan()) {
mergeScan(other.getScan());
}
if (other.hasDeleteType()) {
setDeleteType(other.getDeleteType());
}
if (other.hasTimestamp()) {
setTimestamp(other.getTimestamp());
}
if (other.hasRowBatchSize()) {
setRowBatchSize(other.getRowBatchSize());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasScan()) {
return false;
}
if (!hasDeleteType()) {
return false;
}
if (!hasRowBatchSize()) {
return false;
}
if (!getScan().isInitialized()) {
return false;
}
return true;
}
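// The mergeFrom() loop below dispatches on the raw wire tag, which packs
// the field number and wire type as (fieldNumber << 3) | wireType:
//   scan         (field 1, length-delimited): (1 << 3) | 2 = 10
//   deleteType   (field 2, varint):           (2 << 3) | 0 = 16
//   timestamp    (field 3, varint):           (3 << 3) | 0 = 24
//   rowBatchSize (field 4, varint):           (4 << 3) | 0 = 32
// Tag 0 marks end of input; any other tag is handed to parseUnknownField.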
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 10: {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder();
if (hasScan()) {
subBuilder.mergeFrom(getScan());
}
input.readMessage(subBuilder, extensionRegistry);
setScan(subBuilder.buildPartial());
break;
}
case 16: {
int rawValue = input.readEnum();
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType value = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(2, rawValue);
} else {
bitField0_ |= 0x00000002;
deleteType_ = value;
}
break;
}
case 24: {
bitField0_ |= 0x00000004;
timestamp_ = input.readUInt64();
break;
}
case 32: {
bitField0_ |= 0x00000008;
rowBatchSize_ = input.readUInt32();
break;
}
}
}
}
private int bitField0_;
// required .Scan scan = 1;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_;
public boolean hasScan() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
if (scanBuilder_ == null) {
return scan_;
} else {
return scanBuilder_.getMessage();
}
}
public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
if (scanBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
scan_ = value;
onChanged();
} else {
scanBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
public Builder setScan(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) {
if (scanBuilder_ == null) {
scan_ = builderForValue.build();
onChanged();
} else {
scanBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
if (scanBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) {
scan_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial();
} else {
scan_ = value;
}
onChanged();
} else {
scanBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
public Builder clearScan() {
if (scanBuilder_ == null) {
scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
onChanged();
} else {
scanBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getScanFieldBuilder().getBuilder();
}
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
if (scanBuilder_ != null) {
return scanBuilder_.getMessageOrBuilder();
} else {
return scan_;
}
}
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>
getScanFieldBuilder() {
if (scanBuilder_ == null) {
scanBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>(
scan_,
getParentForChildren(),
isClean());
scan_ = null;
}
return scanBuilder_;
}
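// getScanBuilder()/getScanFieldBuilder() above expose the lazily created
// SingleFieldBuilder: edits made through the nested Scan builder propagate
// back to this Builder via onChanged(), so the two never drift apart. A
// hedged sketch (the Scan message's own setters live in ClientProtos, not
// in this file):
//
//   requestBuilder.getScanBuilder().mergeFrom(someScanProto);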
// required .BulkDeleteRequest.DeleteType deleteType = 2;
private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW;
public boolean hasDeleteType() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType() {
return deleteType_;
}
public Builder setDeleteType(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
deleteType_ = value;
onChanged();
return this;
}
public Builder clearDeleteType() {
bitField0_ = (bitField0_ & ~0x00000002);
deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW;
onChanged();
return this;
}
// optional uint64 timestamp = 3;
private long timestamp_ ;
public boolean hasTimestamp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
public long getTimestamp() {
return timestamp_;
}
public Builder setTimestamp(long value) {
bitField0_ |= 0x00000004;
timestamp_ = value;
onChanged();
return this;
}
public Builder clearTimestamp() {
bitField0_ = (bitField0_ & ~0x00000004);
timestamp_ = 0L;
onChanged();
return this;
}
// required uint32 rowBatchSize = 4;
private int rowBatchSize_ ;
public boolean hasRowBatchSize() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
public int getRowBatchSize() {
return rowBatchSize_;
}
public Builder setRowBatchSize(int value) {
bitField0_ |= 0x00000008;
rowBatchSize_ = value;
onChanged();
return this;
}
public Builder clearRowBatchSize() {
bitField0_ = (bitField0_ & ~0x00000008);
rowBatchSize_ = 0;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:BulkDeleteRequest)
}
static {
defaultInstance = new BulkDeleteRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:BulkDeleteRequest)
}
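// A minimal construction sketch for BulkDeleteRequest. The Scan proto is
// assumed to come from elsewhere (e.g. ProtobufUtil.toScan(...) in HBase
// client code); everything else is API defined above:
//
//   BulkDeleteRequest request = BulkDeleteRequest.newBuilder()
//       .setScan(scanProto)                                  // required
//       .setDeleteType(BulkDeleteRequest.DeleteType.VERSION) // required
//       .setTimestamp(someTimestamp)                         // optional
//       .setRowBatchSize(500)                                // required
//       .build(); // throws UninitializedMessageException if a required
//                 // field is missing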
public interface BulkDeleteResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required uint64 rowsDeleted = 1;
boolean hasRowsDeleted();
long getRowsDeleted();
// optional uint64 versionsDeleted = 2;
boolean hasVersionsDeleted();
long getVersionsDeleted();
}
public static final class BulkDeleteResponse extends
com.google.protobuf.GeneratedMessage
implements BulkDeleteResponseOrBuilder {
// Use BulkDeleteResponse.newBuilder() to construct.
private BulkDeleteResponse(Builder builder) {
super(builder);
}
private BulkDeleteResponse(boolean noInit) {}
private static final BulkDeleteResponse defaultInstance;
public static BulkDeleteResponse getDefaultInstance() {
return defaultInstance;
}
public BulkDeleteResponse getDefaultInstanceForType() {
return defaultInstance;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteResponse_fieldAccessorTable;
}
private int bitField0_;
// required uint64 rowsDeleted = 1;
public static final int ROWSDELETED_FIELD_NUMBER = 1;
private long rowsDeleted_;
public boolean hasRowsDeleted() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
public long getRowsDeleted() {
return rowsDeleted_;
}
// optional uint64 versionsDeleted = 2;
public static final int VERSIONSDELETED_FIELD_NUMBER = 2;
private long versionsDeleted_;
public boolean hasVersionsDeleted() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
public long getVersionsDeleted() {
return versionsDeleted_;
}
private void initFields() {
rowsDeleted_ = 0L;
versionsDeleted_ = 0L;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasRowsDeleted()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeUInt64(1, rowsDeleted_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeUInt64(2, versionsDeleted_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(1, rowsDeleted_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(2, versionsDeleted_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse other = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) obj;
boolean result = true;
result = result && (hasRowsDeleted() == other.hasRowsDeleted());
if (hasRowsDeleted()) {
result = result && (getRowsDeleted()
== other.getRowsDeleted());
}
result = result && (hasVersionsDeleted() == other.hasVersionsDeleted());
if (hasVersionsDeleted()) {
result = result && (getVersionsDeleted()
== other.getVersionsDeleted());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
@java.lang.Override
public int hashCode() {
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasRowsDeleted()) {
hash = (37 * hash) + ROWSDELETED_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getRowsDeleted());
}
if (hasVersionsDeleted()) {
hash = (37 * hash) + VERSIONSDELETED_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getVersionsDeleted());
}
hash = (29 * hash) + getUnknownFields().hashCode();
return hash;
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteResponse_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
rowsDeleted_ = 0L;
bitField0_ = (bitField0_ & ~0x00000001);
versionsDeleted_ = 0L;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDescriptor();
}
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance();
}
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse build() {
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse buildPartial() {
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse result = new org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.rowsDeleted_ = rowsDeleted_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.versionsDeleted_ = versionsDeleted_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) {
return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse other) {
if (other == org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance()) return this;
if (other.hasRowsDeleted()) {
setRowsDeleted(other.getRowsDeleted());
}
if (other.hasVersionsDeleted()) {
setVersionsDeleted(other.getVersionsDeleted());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasRowsDeleted()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
rowsDeleted_ = input.readUInt64();
break;
}
case 16: {
bitField0_ |= 0x00000002;
versionsDeleted_ = input.readUInt64();
break;
}
}
}
}
private int bitField0_;
// required uint64 rowsDeleted = 1;
private long rowsDeleted_ ;
public boolean hasRowsDeleted() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
public long getRowsDeleted() {
return rowsDeleted_;
}
public Builder setRowsDeleted(long value) {
bitField0_ |= 0x00000001;
rowsDeleted_ = value;
onChanged();
return this;
}
public Builder clearRowsDeleted() {
bitField0_ = (bitField0_ & ~0x00000001);
rowsDeleted_ = 0L;
onChanged();
return this;
}
// optional uint64 versionsDeleted = 2;
private long versionsDeleted_ ;
public boolean hasVersionsDeleted() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
public long getVersionsDeleted() {
return versionsDeleted_;
}
public Builder setVersionsDeleted(long value) {
bitField0_ |= 0x00000002;
versionsDeleted_ = value;
onChanged();
return this;
}
public Builder clearVersionsDeleted() {
bitField0_ = (bitField0_ & ~0x00000002);
versionsDeleted_ = 0L;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:BulkDeleteResponse)
}
static {
defaultInstance = new BulkDeleteResponse(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:BulkDeleteResponse)
}
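// Reading a response off the wire, as a short sketch:
//
//   BulkDeleteResponse resp = BulkDeleteResponse.parseFrom(bytes);
//   long rows = resp.getRowsDeleted();       // required, always present
//   if (resp.hasVersionsDeleted()) {         // optional, check first
//     long versions = resp.getVersionsDeleted();
//   }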
public static abstract class BulkDeleteService
implements com.google.protobuf.Service {
protected BulkDeleteService() {}
public interface Interface {
public abstract void delete(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done);
}
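// newReflectiveService() below adapts any Interface implementation into a
// protobuf Service. A skeletal server-side sketch (the real endpoint logic
// lives in BulkDeleteEndpoint, not in this generated file):
//
//   com.google.protobuf.Service svc = BulkDeleteService.newReflectiveService(
//       new BulkDeleteService.Interface() {
//         public void delete(com.google.protobuf.RpcController controller,
//             BulkDeleteRequest request,
//             com.google.protobuf.RpcCallback<BulkDeleteResponse> done) {
//           done.run(BulkDeleteResponse.newBuilder().setRowsDeleted(0L).build());
//         }
//       });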
public static com.google.protobuf.Service newReflectiveService(
final Interface impl) {
return new BulkDeleteService() {
@java.lang.Override
public void delete(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done) {
impl.delete(controller, request, done);
}
};
}
public static com.google.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl) {
return new com.google.protobuf.BlockingService() {
public final com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final com.google.protobuf.Message callBlockingMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
com.google.protobuf.Message request)
throws com.google.protobuf.ServiceException {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callBlockingMethod() given method descriptor for " +
"wrong service type.");
}
switch(method.getIndex()) {
case 0:
return impl.delete(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
};
}
public abstract void delete(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done);
public static final
com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.getDescriptor().getServices().get(0);
}
public final com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final void callMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
com.google.protobuf.Message request,
com.google.protobuf.RpcCallback<
com.google.protobuf.Message> done) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callMethod() given method descriptor for wrong " +
"service type.");
}
switch(method.getIndex()) {
case 0:
this.delete(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)request,
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse>specializeCallback(
done));
return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public static Stub newStub(
com.google.protobuf.RpcChannel channel) {
return new Stub(channel);
}
public static final class Stub extends org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteService implements Interface {
private Stub(com.google.protobuf.RpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.RpcChannel channel;
public com.google.protobuf.RpcChannel getChannel() {
return channel;
}
public void delete(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done) {
channel.callMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance(),
com.google.protobuf.RpcUtil.generalizeCallback(
done,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance()));
}
}
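// The non-blocking Stub above hands the response to an RpcCallback. Sketch,
// assuming the surrounding RPC framework supplies `channel` and
// `controller`:
//
//   BulkDeleteService stub = BulkDeleteService.newStub(channel);
//   stub.delete(controller, request,
//       new com.google.protobuf.RpcCallback<BulkDeleteResponse>() {
//         public void run(BulkDeleteResponse response) {
//           // response may be null on failure; consult the controller
//         }
//       });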
public static BlockingInterface newBlockingStub(
com.google.protobuf.BlockingRpcChannel channel) {
return new BlockingStub(channel);
}
public interface BlockingInterface {
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse delete(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request)
throws com.google.protobuf.ServiceException;
}
private static final class BlockingStub implements BlockingInterface {
private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.BlockingRpcChannel channel;
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse delete(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance());
}
}
}
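// How a client typically reaches this service in HBase: each region exposes
// a CoprocessorRpcChannel (which implements BlockingRpcChannel), and the
// blocking stub runs over it. A hedged sketch (`table`, `row`, and
// `controller` come from the HBase client API, not from this file):
//
//   CoprocessorRpcChannel channel = table.coprocessorService(row);
//   BulkDeleteService.BlockingInterface stub =
//       BulkDeleteService.newBlockingStub(channel);
//   BulkDeleteResponse resp = stub.delete(controller, request);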
private static com.google.protobuf.Descriptors.Descriptor
internal_static_BulkDeleteRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_BulkDeleteRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_BulkDeleteResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_BulkDeleteResponse_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\020BulkDelete.proto\032\014Client.proto\"\300\001\n\021Bul" +
"kDeleteRequest\022\023\n\004scan\030\001 \002(\0132\005.Scan\0221\n\nd" +
"eleteType\030\002 \002(\0162\035.BulkDeleteRequest.Dele" +
"teType\022\021\n\ttimestamp\030\003 \001(\004\022\024\n\014rowBatchSiz" +
"e\030\004 \002(\r\":\n\nDeleteType\022\007\n\003ROW\020\000\022\n\n\006FAMILY" +
"\020\001\022\n\n\006COLUMN\020\002\022\013\n\007VERSION\020\003\"B\n\022BulkDelet" +
"eResponse\022\023\n\013rowsDeleted\030\001 \002(\004\022\027\n\017versio" +
"nsDeleted\030\002 \001(\0042F\n\021BulkDeleteService\0221\n\006" +
"delete\022\022.BulkDeleteRequest\032\023.BulkDeleteR" +
"esponseBQ\n5org.apache.hadoop.hbase.copro",
"cessor.example.generatedB\020BulkDeleteProt" +
"osH\001\210\001\001\240\001\001"
};
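// descriptorData above is the serialized FileDescriptorProto for
// BulkDelete.proto, escaped into string chunks because javac caps the size
// of a single string constant. internalBuildGeneratedFileFrom() below
// parses it, resolves the Client.proto dependency, and the assigner then
// wires up the per-message descriptors and field accessor tables.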
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_BulkDeleteRequest_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_BulkDeleteRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_BulkDeleteRequest_descriptor,
new java.lang.String[] { "Scan", "DeleteType", "Timestamp", "RowBatchSize", },
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.class,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.Builder.class);
internal_static_BulkDeleteResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_BulkDeleteResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_BulkDeleteResponse_descriptor,
new java.lang.String[] { "RowsDeleted", "VersionsDeleted", },
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder.class);
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}