Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

first commit

  • Loading branch information...
commit b109155e8b106b5a4d17c87811340e889b31c09e 0 parents
@gideondk authored
Showing with 3,688 additions and 0 deletions.
  1. BIN  .cache
  2. +13 −0 .classpath
  3. +6 −0 .gitignore
  4. +12 −0 .project
  5. +2 −0  .settings/org.eclipse.core.resources.prefs
  6. +36 −0 .settings/org.scala-ide.sdt.core.prefs
  7. +174 −0 LICENSE
  8. 0  README.md
  9. +40 −0 build.sbt
  10. +1 −0  project/build.properties
  11. +3 −0  project/plugins.sbt
  12. +5 −0 sbt
  13. BIN  sbt-launch.jar
  14. +4 −0 src/main/resources/application.conf
  15. +64 −0 src/main/scala/com/basho/riak/protobuf/Enum.scala
  16. +169 −0 src/main/scala/com/basho/riak/protobuf/Message.scala
  17. +1,708 −0 src/main/scala/com/basho/riak/protobuf/RiakKvPB.scala
  18. +209 −0 src/main/scala/com/basho/riak/protobuf/RiakPB.scala
  19. +115 −0 src/main/scala/nl/gideondk/raiku/Actors.scala
  20. +60 −0 src/main/scala/nl/gideondk/raiku/Messages.scala
  21. +95 −0 src/main/scala/nl/gideondk/raiku/ProtoBufConversion.scala
  22. +88 −0 src/main/scala/nl/gideondk/raiku/RWObject.scala
  23. +27 −0 src/main/scala/nl/gideondk/raiku/Raiku.scala
  24. +162 −0 src/main/scala/nl/gideondk/raiku/RaikuBucket.scala
  25. +49 −0 src/main/scala/nl/gideondk/raiku/Request.scala
  26. +63 −0 src/main/scala/nl/gideondk/raiku/ValidatedFutureIO.scala
  27. +36 −0 src/main/scala/nl/gideondk/raiku/package.scala
  28. +249 −0 src/test/scala/nl/gideondk/raiku/BucketAdvTest.scala
  29. +142 −0 src/test/scala/nl/gideondk/raiku/BucketTest.scala
  30. +64 −0 src/test/scala/nl/gideondk/raiku/PerfTest.scala
  31. +92 −0 src/test/scala/nl/gideondk/raiku/RWObjectTest.scala
BIN  .cache
Binary file not shown
13 .classpath
@@ -0,0 +1,13 @@
+<classpath>
+ <classpathentry output="target/scala-2.10/classes" path="src/main/scala" kind="src"></classpathentry>
+ <classpathentry output="target/scala-2.10/classes" path="src/main/java" kind="src"></classpathentry>
+ <classpathentry output="target/scala-2.10/test-classes" path="src/test/scala" kind="src"></classpathentry>
+ <classpathentry output="target/scala-2.10/test-classes" path="src/test/java" kind="src"></classpathentry>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"></classpathentry>
+ <classpathentry path="/Users/gideondk/.ivy2/cache/org.scalaz/scalaz-core_2.10.0-RC2/jars/scalaz-core_2.10.0-RC2-7.0.0-M4.jar" kind="lib"></classpathentry>
+ <classpathentry path="/Users/gideondk/.ivy2/cache/com.google.protobuf/protobuf-java/jars/protobuf-java-2.4.1.jar" kind="lib"></classpathentry>
+ <classpathentry path="/Users/gideondk/.ivy2/cache/com.typesafe.akka/akka-actor_2.10.0-RC2/bundles/akka-actor_2.10.0-RC2-2.1.0-RC2.jar" kind="lib"></classpathentry>
+ <classpathentry path="/Users/gideondk/.ivy2/cache/com.typesafe/config/bundles/config-1.0.0.jar" kind="lib"></classpathentry>
+ <classpathentry path="org.eclipse.jdt.launching.JRE_CONTAINER" kind="con"></classpathentry>
+ <classpathentry path="bin" kind="output"></classpathentry>
+</classpath>
6 .gitignore
@@ -0,0 +1,6 @@
+.idea
+.idea_modules
+*.iml
+target
+.DS_Store
+.ensime*
12 .project
@@ -0,0 +1,12 @@
+<projectDescription>
+ <name>raiku</name>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+</projectDescription>
2  .settings/org.eclipse.core.resources.prefs
@@ -0,0 +1,2 @@
+eclipse.preferences.version=1
+encoding/<project>=UTF-8
36 .settings/org.scala-ide.sdt.core.prefs
@@ -0,0 +1,36 @@
+Xcheck-null=false
+Xcheckinit=false
+Xdisable-assertions=false
+Xelide-below=-2147483648
+Xexperimental=false
+Xfatal-warnings=false
+Xfuture=false
+Xlog-implicits=false
+Xmigration=false
+Xno-uescape=false
+Xpluginsdir=/Applications/eclipse/configuration/org.eclipse.osgi/bundles/239/2/.cp/lib
+Ybuild-manager-debug=false
+Yno-generic-signatures=false
+Yno-imports=false
+Ypresentation-debug=false
+Ypresentation-delay=0
+Ypresentation-verbose=false
+Yrecursion=0
+Yself-in-annots=false
+Ystruct-dispatch=poly-cache
+Ywarn-dead-code=false
+buildmanager=sbt
+compileorder=Mixed
+deprecation=false
+eclipse.preferences.version=1
+explaintypes=false
+g=vars
+no-specialization=false
+nowarn=false
+optimise=false
+scala.compiler.additionalParams=-Dfile.encoding\=UTF-8
+scala.compiler.useProjectSettings=true
+stopBuildOnError=true
+target=jvm-1.5
+unchecked=false
+verbose=false
174 LICENSE
@@ -0,0 +1,174 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
0  README.md
No changes.
40 build.sbt
@@ -0,0 +1,40 @@
+name := "raiku"
+
+organization := "nl.gideondk"
+
+version := "0.1"
+
+scalaVersion := "2.10.0-RC2"
+
+crossScalaVersions := Seq("2.10.0-RC2")
+
+parallelExecution in Test := false
+
+resolvers ++= Seq("Typesafe Repository (releases)" at "http://repo.typesafe.com/typesafe/releases/",
+ "gideondk-repo" at "https://raw.github.com/gideondk/gideondk-mvn-repo/master",
+ "Scala Tools Repository (snapshots)" at "http://scala-tools.org/repo-snapshots",
+ "Scala Tools Repository (releases)" at "http://scala-tools.org/repo-releases",
+ "Sonatype OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots",
+ "repo.codahale.com" at "http://repo.codahale.com",
+ "spray repo" at "http://repo.spray.io"
+)
+
+libraryDependencies ++= Seq(
+// "org.specs2" %% "specs2" % "1.9" % "test" withSources(),
+// "commons-pool" % "commons-pool" % "1.5.6" withSources(),
+ "org.scalaz" % "scalaz-core_2.10.0-RC2" % "7.0.0-M4" withSources(),
+ "org.scalaz" % "scalaz-effect_2.10.0-RC2" % "7.0.0-M4" withSources(),
+ "com.google.protobuf" % "protobuf-java" % "2.4.1" withSources(),
+ "org.specs2" % "specs2_2.10.0-RC2" % "1.12.2",
+// "com.stackmob" % "scaliak_2.9.2" % "0.3-FUTURES",
+// "net.debasishg" % "sjsonapp_2.9.2" % "0.1.1-scalaz-seven",
+// "org.scalatest" %% "scalatest" % "1.7.2" % "test",
+// "io.backchat.jerkson" % "jerkson_2.9.2" % "0.7.0",
+// "net.databinder.dispatch" %% "dispatch-core" % "0.9.4",
+// "io.spray" %% "spray-json" % "1.2.2",
+// "io.spray" % "spray-client" % "1.0-M5"
+ "io.spray" %% "spray-json" % "1.2.2" cross CrossVersion.full,
+ "com.typesafe.akka" %% "akka-actor" % "2.1.0-RC2" cross CrossVersion.full
+)
+
+logBuffered := false
1  project/build.properties
@@ -0,0 +1 @@
+sbt.version=0.12.1
3  project/plugins.sbt
@@ -0,0 +1,3 @@
+addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "2.1.0")
+
+addSbtPlugin("org.ensime" % "ensime-sbt-cmd" % "0.1.0")
5 sbt
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+SCALA="-XX:MaxPermSize=256m -Xms1G -Xmx1G -Xss8M"
+java $SCALA -jar `dirname $0`/sbt-launch.jar "$@"
+
BIN  sbt-launch.jar
Binary file not shown
4 src/main/resources/application.conf
@@ -0,0 +1,4 @@
+akka {
+ loglevel = WARNING
+ stdout-loglevel = WARNING
+}
64 src/main/scala/com/basho/riak/protobuf/Enum.scala
@@ -0,0 +1,64 @@
+package com.basho.riak.protobuf
+
+/**
+ * Viktor Klang's Enum, modified for ScalaBuff for protobuf usage
+ * Source: https://gist.github.com/1057513/
+ */
+trait Enum {
+
+ import java.util.concurrent.atomic.AtomicReference
+
+ type EnumVal <: Value
+
+ implicit def _enumToInt(_e: EnumVal) = _e.id
+
+ private val _values = new AtomicReference(Vector[EnumVal]())
+
+ /**
+ * Add an EnumVal to our storage, using CCAS to make sure it's thread safe, returns the ordinal.
+ */
+ private final def addEnumVal(newVal: EnumVal): Int = {
+ import _values.{ get, compareAndSet => CAS }
+ val oldVec = get
+ val newVec = oldVec :+ newVal
+ if ((get eq oldVec) && CAS(oldVec, newVec)) newVec.indexWhere(_ eq newVal) else addEnumVal(newVal)
+ }
+
+ /**
+ * Get all the enums that exist for this type.
+ */
+ def values: Vector[EnumVal] = _values.get
+
+ protected trait Value extends com.google.protobuf.Internal.EnumLite {
+ self: EnumVal => // Enforce that no one mixes in Value in a non-EnumVal type
+ final val ordinal = addEnumVal(this) // Adds the EnumVal and returns the ordinal
+
+ // proto enum value
+ val id: Int
+ // proto enum name
+ val name: String
+
+ lazy val getNumber = id
+
+ override def toString = name
+ /**
+ * Enum Values with identical values are equal.
+ */
+ override def equals(other: Any) = other.isInstanceOf[Value] && this.id == other.asInstanceOf[Value].id
+ /**
+ * Enum Values with identical values return the same hashCode.
+ */
+ override def hashCode = 31 * (this.getClass.## + name.## + id)
+ }
+
+}
+
+/**
+ * Thrown when an unknown enum number is passed to the valueOf method of an Enum.
+ */
+class UnknownEnumException(enumID: Int) extends RuntimeException("Unknown enum ID: " + enumID)
+
+/**
+ * Thrown when a required field with enum type is uninitialized on access attempt.
+ */
+class UninitializedEnumException[T](name: String) extends RuntimeException("Enum not initialized: " + name)
169 src/main/scala/com/basho/riak/protobuf/Message.scala
@@ -0,0 +1,169 @@
+package com.basho.riak.protobuf
+
+import com.google.protobuf._
+import java.io.{ FilterInputStream, InputStream }
+
+/**
+ * Message trait for messages generated with ScalaBuff.
+ * Ordinarily Messages would have GeneratedMessageLite.Builder mixed in, but since it's a Java class, we can't do that.
+ * Contains methods implementing the MessageLite.Builder Java interface, similar to ones in GeneratedMessageLite.Builder.
+ *
+ * @author Sandro Gržičić
+ */
+trait Message[MessageType <: MessageLite with MessageLite.Builder] extends MessageLite.Builder {
+
+ implicit def _anyToOption[T](any: T): Option[T] = Option[T](any)
+
+ implicit def _stringToByteString(string: String) = ByteString.copyFromUtf8(string)
+
+ def mergeFrom(message: MessageType): MessageType
+
+ def getDefaultInstanceForType: MessageType
+
+ def isInitialized: Boolean
+
+ def writeTo(output: com.google.protobuf.CodedOutputStream)
+
+ def mergeFrom(input: CodedInputStream, extensionRegistry: ExtensionRegistryLite): MessageType
+
+ def mergeFrom(input: CodedInputStream): MessageType = mergeFrom(input, ExtensionRegistryLite.getEmptyRegistry)
+
+ def mergeFrom(data: ByteString): MessageType = {
+ val input = data.newCodedInput
+ val merged = mergeFrom(input)
+ input.checkLastTagWas(0)
+ merged
+ }
+
+ def mergeFrom(data: ByteString, extensionRegistry: ExtensionRegistryLite): MessageType = {
+ val input = data.newCodedInput
+ val merged = mergeFrom(input, extensionRegistry)
+ input.checkLastTagWas(0)
+ merged
+ }
+
+ def mergeFrom(data: Array[Byte]): MessageType = mergeFrom(data, 0, data.length)
+
+ def mergeFrom(data: Array[Byte], offset: Int, length: Int): MessageType = {
+ val input = CodedInputStream.newInstance(data, offset, length)
+ val merged = mergeFrom(input)
+ input.checkLastTagWas(0)
+ merged
+ }
+
+ def mergeFrom(data: Array[Byte], extensionRegistry: ExtensionRegistryLite): MessageType = mergeFrom(data, 0, data.length, extensionRegistry)
+
+ def mergeFrom(data: Array[Byte], off: Int, len: Int, extensionRegistry: ExtensionRegistryLite): MessageType = {
+ val input = CodedInputStream.newInstance(data, off, len)
+ val merged = mergeFrom(input, extensionRegistry)
+ input.checkLastTagWas(0)
+ merged
+ }
+
+ def mergeFrom(input: InputStream): MessageType = {
+ val codedInput = CodedInputStream.newInstance(input)
+ val merged = mergeFrom(codedInput)
+ codedInput.checkLastTagWas(0)
+ merged
+ }
+
+ def mergeFrom(input: InputStream, extensionRegistry: ExtensionRegistryLite): MessageType = {
+ val codedInput = CodedInputStream.newInstance(input)
+ val merged = mergeFrom(codedInput, extensionRegistry)
+ codedInput.checkLastTagWas(0)
+ merged
+ }
+
+ def mergeDelimitedFrom(input: InputStream, extensionRegistry: ExtensionRegistryLite) = {
+ val firstByte = input.read
+ if (firstByte != -1) {
+ val size = CodedInputStream.readRawVarint32(firstByte, input)
+ val limitedInput = new LimitedInputStream(input, size)
+ mergeFrom(limitedInput, extensionRegistry)
+ true
+ } else {
+ false
+ }
+ }
+
+ def mergeDelimitedFrom(input: InputStream) = {
+ mergeDelimitedFrom(input, ExtensionRegistryLite.getEmptyRegistry)
+ }
+
+ def mergeDelimitedFromStream(input: InputStream, extensionRegistry: ExtensionRegistryLite): Option[MessageType] = {
+ val firstByte = input.read
+ if (firstByte != -1) {
+ val size = CodedInputStream.readRawVarint32(firstByte, input)
+ val limitedInput = new LimitedInputStream(input, size)
+ Some(mergeFrom(limitedInput, extensionRegistry))
+ } else {
+ None
+ }
+ }
+
+ def mergeDelimitedFromStream(input: InputStream): Option[MessageType] = {
+ mergeDelimitedFromStream(input, ExtensionRegistryLite.getEmptyRegistry)
+ }
+
+ /**
+ * See {@link com.google.protobuf.AbstractMessageLite.Builder#LimitedInputStream}.
+ */
+ private final class LimitedInputStream(
+ val inputStream: InputStream, private var limit: Int) extends FilterInputStream(inputStream) {
+
+ override def available = scala.math.min(super.available, limit)
+
+ override def read = {
+ if (limit > 0) {
+ val result = super.read
+ if (result >= 0) {
+ limit -= 1
+ }
+ result
+ } else {
+ -1
+ }
+ }
+
+ override def read(bytes: Array[Byte], offset: Int, length: Int) = {
+ if (limit > 0) {
+ val limitedLength = scala.math.min(length, limit)
+ val result = super.read(bytes, offset, limitedLength)
+ if (result >= 0) {
+ limit -= result
+ }
+ result
+ } else {
+ -1
+ }
+ }
+
+ override def skip(n: Long) = {
+ val result = super.skip(scala.math.min(n, limit))
+ if (result >= 0) {
+ limit = (limit - result).toInt
+ }
+ result
+ }
+ }
+
+ /**
+ * See {@link com.google.protobuf.CodedInputStream#readMessage}.
+ *
+ * CodedInputStream#readMessage attempts to mutate the passed Builder and discards the returned value,
+ * which we need, since our "Builders" (Messages) return a new instance whenever a mutation is performed.
+ */
+ def readMessage[ReadMessageType <: MessageLite.Builder](in: CodedInputStream, message: ReadMessageType, extensionRegistry: ExtensionRegistryLite) = {
+ val length = in.readRawVarint32()
+ val oldLimit = in.pushLimit(length)
+
+ val newMessage = message.mergeFrom(in, extensionRegistry).asInstanceOf[ReadMessageType]
+
+ in.checkLastTagWas(0)
+ in.popLimit(oldLimit)
+
+ newMessage
+ }
+
+}
+
1,708 src/main/scala/com/basho/riak/protobuf/RiakKvPB.scala
@@ -0,0 +1,1708 @@
+// Generated by ScalaBuff, the Scala Protocol Buffers compiler. DO NOT EDIT!
+// source: riak_kv.proto
+
+package com.basho.riak.protobuf
+
+//import "riak.proto"
+
+final case class RpbGetClientIdResp (
+ `clientId`: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY
+) extends com.google.protobuf.GeneratedMessageLite
+ with Message[RpbGetClientIdResp] {
+
+
+ def clearClientId = copy(`clientId` = com.google.protobuf.ByteString.EMPTY)
+
+ def writeTo(output: com.google.protobuf.CodedOutputStream) {
+ output.writeBytes(1, `clientId`)
+ }
+
+ lazy val getSerializedSize = {
+ import com.google.protobuf.CodedOutputStream._
+ var size = 0
+ size += computeBytesSize(1, `clientId`)
+
+ size
+ }
+
+ def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbGetClientIdResp = {
+ import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
+ var __clientId: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY
+
+ def __newMerged = RpbGetClientIdResp(
+ __clientId
+ )
+ while (true) in.readTag match {
+ case 0 => return __newMerged
+ case 10 => __clientId = in.readBytes()
+ case default => if (!in.skipField(default)) return __newMerged
+ }
+ null
+ }
+
+ def mergeFrom(m: RpbGetClientIdResp) = {
+ RpbGetClientIdResp(
+ m.`clientId`
+ )
+ }
+
+ def getDefaultInstanceForType = RpbGetClientIdResp.defaultInstance
+ def clear = getDefaultInstanceForType
+ def isInitialized = true
+ def build = this
+ def buildPartial = this
+ def newBuilderForType = this
+ def toBuilder = this
+}
+
+object RpbGetClientIdResp {
+ @reflect.BeanProperty val defaultInstance = new RpbGetClientIdResp()
+
+ val CLIENT_ID_FIELD_NUMBER = 1
+
+}
+final case class RpbSetClientIdReq (
+ `clientId`: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY
+) extends com.google.protobuf.GeneratedMessageLite
+ with Message[RpbSetClientIdReq] {
+
+
+ def clearClientId = copy(`clientId` = com.google.protobuf.ByteString.EMPTY)
+
+ def writeTo(output: com.google.protobuf.CodedOutputStream) {
+ output.writeBytes(1, `clientId`)
+ }
+
+ lazy val getSerializedSize = {
+ import com.google.protobuf.CodedOutputStream._
+ var size = 0
+ size += computeBytesSize(1, `clientId`)
+
+ size
+ }
+
+ def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbSetClientIdReq = {
+ import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
+ var __clientId: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY
+
+ def __newMerged = RpbSetClientIdReq(
+ __clientId
+ )
+ while (true) in.readTag match {
+ case 0 => return __newMerged
+ case 10 => __clientId = in.readBytes()
+ case default => if (!in.skipField(default)) return __newMerged
+ }
+ null
+ }
+
+ def mergeFrom(m: RpbSetClientIdReq) = {
+ RpbSetClientIdReq(
+ m.`clientId`
+ )
+ }
+
+ def getDefaultInstanceForType = RpbSetClientIdReq.defaultInstance
+ def clear = getDefaultInstanceForType
+ def isInitialized = true
+ def build = this
+ def buildPartial = this
+ def newBuilderForType = this
+ def toBuilder = this
+}
+
+object RpbSetClientIdReq {
+ @reflect.BeanProperty val defaultInstance = new RpbSetClientIdReq()
+
+ val CLIENT_ID_FIELD_NUMBER = 1
+
+}
+final case class RpbGetReq (
+ `bucket`: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY,
+ `key`: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY,
+ `r`: Option[Int] = None,
+ `pr`: Option[Int] = None,
+ `basicQuorum`: Option[Boolean] = None,
+ `notfoundOk`: Option[Boolean] = None,
+ `ifModified`: Option[com.google.protobuf.ByteString] = None,
+ `head`: Option[Boolean] = None,
+ `deletedvclock`: Option[Boolean] = None
+) extends com.google.protobuf.GeneratedMessageLite
+ with Message[RpbGetReq] {
+
+ def setR(_f: Int) = copy(`r` = _f)
+ def setPr(_f: Int) = copy(`pr` = _f)
+ def setBasicQuorum(_f: Boolean) = copy(`basicQuorum` = _f)
+ def setNotfoundOk(_f: Boolean) = copy(`notfoundOk` = _f)
+ def setIfModified(_f: com.google.protobuf.ByteString) = copy(`ifModified` = _f)
+ def setHead(_f: Boolean) = copy(`head` = _f)
+ def setDeletedvclock(_f: Boolean) = copy(`deletedvclock` = _f)
+
+ def clearBucket = copy(`bucket` = com.google.protobuf.ByteString.EMPTY)
+ def clearKey = copy(`key` = com.google.protobuf.ByteString.EMPTY)
+ def clearR = copy(`r` = None)
+ def clearPr = copy(`pr` = None)
+ def clearBasicQuorum = copy(`basicQuorum` = None)
+ def clearNotfoundOk = copy(`notfoundOk` = None)
+ def clearIfModified = copy(`ifModified` = None)
+ def clearHead = copy(`head` = None)
+ def clearDeletedvclock = copy(`deletedvclock` = None)
+
+ def writeTo(output: com.google.protobuf.CodedOutputStream) {
+ output.writeBytes(1, `bucket`)
+ output.writeBytes(2, `key`)
+ if (`r`.isDefined) output.writeUInt32(3, `r`.get)
+ if (`pr`.isDefined) output.writeUInt32(4, `pr`.get)
+ if (`basicQuorum`.isDefined) output.writeBool(5, `basicQuorum`.get)
+ if (`notfoundOk`.isDefined) output.writeBool(6, `notfoundOk`.get)
+ if (`ifModified`.isDefined) output.writeBytes(7, `ifModified`.get)
+ if (`head`.isDefined) output.writeBool(8, `head`.get)
+ if (`deletedvclock`.isDefined) output.writeBool(9, `deletedvclock`.get)
+ }
+
+ lazy val getSerializedSize = {
+ import com.google.protobuf.CodedOutputStream._
+ var size = 0
+ size += computeBytesSize(1, `bucket`)
+ size += computeBytesSize(2, `key`)
+ if (`r`.isDefined) size += computeUInt32Size(3, `r`.get)
+ if (`pr`.isDefined) size += computeUInt32Size(4, `pr`.get)
+ if (`basicQuorum`.isDefined) size += computeBoolSize(5, `basicQuorum`.get)
+ if (`notfoundOk`.isDefined) size += computeBoolSize(6, `notfoundOk`.get)
+ if (`ifModified`.isDefined) size += computeBytesSize(7, `ifModified`.get)
+ if (`head`.isDefined) size += computeBoolSize(8, `head`.get)
+ if (`deletedvclock`.isDefined) size += computeBoolSize(9, `deletedvclock`.get)
+
+ size
+ }
+
+ def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbGetReq = {
+ import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
+ var __bucket: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY
+ var __key: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY
+ var __r: Option[Int] = `r`
+ var __pr: Option[Int] = `pr`
+ var __basicQuorum: Option[Boolean] = `basicQuorum`
+ var __notfoundOk: Option[Boolean] = `notfoundOk`
+ var __ifModified: Option[com.google.protobuf.ByteString] = `ifModified`
+ var __head: Option[Boolean] = `head`
+ var __deletedvclock: Option[Boolean] = `deletedvclock`
+
+ def __newMerged = RpbGetReq(
+ __bucket,
+ __key,
+ __r,
+ __pr,
+ __basicQuorum,
+ __notfoundOk,
+ __ifModified,
+ __head,
+ __deletedvclock
+ )
+ while (true) in.readTag match {
+ case 0 => return __newMerged
+ case 10 => __bucket = in.readBytes()
+ case 18 => __key = in.readBytes()
+ case 24 => __r = in.readUInt32()
+ case 32 => __pr = in.readUInt32()
+ case 40 => __basicQuorum = in.readBool()
+ case 48 => __notfoundOk = in.readBool()
+ case 58 => __ifModified = in.readBytes()
+ case 64 => __head = in.readBool()
+ case 72 => __deletedvclock = in.readBool()
+ case default => if (!in.skipField(default)) return __newMerged
+ }
+ null
+ }
+
+ def mergeFrom(m: RpbGetReq) = {
+ RpbGetReq(
+ m.`bucket`,
+ m.`key`,
+ m.`r`.orElse(`r`),
+ m.`pr`.orElse(`pr`),
+ m.`basicQuorum`.orElse(`basicQuorum`),
+ m.`notfoundOk`.orElse(`notfoundOk`),
+ m.`ifModified`.orElse(`ifModified`),
+ m.`head`.orElse(`head`),
+ m.`deletedvclock`.orElse(`deletedvclock`)
+ )
+ }
+
+ def getDefaultInstanceForType = RpbGetReq.defaultInstance
+ def clear = getDefaultInstanceForType
+ def isInitialized = true
+ def build = this
+ def buildPartial = this
+ def newBuilderForType = this
+ def toBuilder = this
+}
+
/** Companion of RpbGetReq: shared default ("empty") instance and the message's
  * protobuf field numbers.
  */
object RpbGetReq {
  // @BeanProperty generates a Java-style getDefaultInstance() accessor for interop.
  @reflect.BeanProperty val defaultInstance = new RpbGetReq()

  // Protobuf field numbers; wire tags are (fieldNumber << 3) | wireType.
  val BUCKET_FIELD_NUMBER = 1
  val KEY_FIELD_NUMBER = 2
  val R_FIELD_NUMBER = 3
  val PR_FIELD_NUMBER = 4
  val BASIC_QUORUM_FIELD_NUMBER = 5
  val NOTFOUND_OK_FIELD_NUMBER = 6
  val IF_MODIFIED_FIELD_NUMBER = 7
  val HEAD_FIELD_NUMBER = 8
  val DELETEDVCLOCK_FIELD_NUMBER = 9

}
/** Generated, immutable representation of the Riak `RpbGetResp` protobuf
  * message: a repeated `content` field plus optional `vclock` and `unchanged`.
  * The case class acts as its own protobuf builder (see the shims at the end).
  */
final case class RpbGetResp (
  `content`: Vector[RpbContent] = Vector.empty[RpbContent],
  `vclock`: Option[com.google.protobuf.ByteString] = None,
  `unchanged`: Option[Boolean] = None
) extends com.google.protobuf.GeneratedMessageLite
  with Message[RpbGetResp] {

  // Functional "setters": each returns an updated copy (fields are immutable).
  // NOTE(review): setters on optional fields pass a raw value into an Option
  // field — presumably an implicit T => Option[T] is in scope; confirm.
  def setContent(_i: Int, _v: RpbContent) = copy(`content` = `content`.updated(_i, _v))
  def addContent(_f: RpbContent) = copy(`content` = `content` :+ _f)
  def addAllContent(_f: RpbContent*) = copy(`content` = `content` ++ _f)
  def addAllContent(_f: TraversableOnce[RpbContent]) = copy(`content` = `content` ++ _f)
  def setVclock(_f: com.google.protobuf.ByteString) = copy(`vclock` = _f)
  def setUnchanged(_f: Boolean) = copy(`unchanged` = _f)

  // Reset individual fields to their defaults, again returning a copy.
  def clearContent = copy(`content` = Vector.empty[RpbContent])
  def clearVclock = copy(`vclock` = None)
  def clearUnchanged = copy(`unchanged` = None)

  // Serializes the set fields in ascending field-number order.
  def writeTo(output: com.google.protobuf.CodedOutputStream) {
    for (_v <- `content`) output.writeMessage(1, _v)
    if (`vclock`.isDefined) output.writeBytes(2, `vclock`.get)
    if (`unchanged`.isDefined) output.writeBool(3, `unchanged`.get)
  }

  // Serialized size in bytes; lazy, so computed once on demand and memoized.
  lazy val getSerializedSize = {
    import com.google.protobuf.CodedOutputStream._
    var size = 0
    for (_v <- `content`) size += computeMessageSize(1, _v)
    if (`vclock`.isDefined) size += computeBytesSize(2, `vclock`.get)
    if (`unchanged`.isDefined) size += computeBoolSize(3, `unchanged`.get)

    size
  }

  /** Reads fields from the wire and merges them into this message.
    * Each case value is a protobuf tag: (fieldNumber << 3) | wireType;
    * tag 0 signals end of input.
    */
  def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbGetResp = {
    import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
    val __content: collection.mutable.Buffer[RpbContent] = `content`.toBuffer
    var __vclock: Option[com.google.protobuf.ByteString] = `vclock`
    var __unchanged: Option[Boolean] = `unchanged`

    def __newMerged = RpbGetResp(
      Vector(__content: _*),
      __vclock,
      __unchanged
    )
    while (true) in.readTag match {
      case 0 => return __newMerged
      case 10 => __content += readMessage[RpbContent](in, RpbContent.defaultInstance, _emptyRegistry)
      case 18 => __vclock = in.readBytes()
      case 24 => __unchanged = in.readBool()
      case default => if (!in.skipField(default)) return __newMerged
    }
    null // unreachable: the loop above only exits via the `return`s
  }

  /** Protobuf merge: repeated fields concatenate; optional fields prefer `m`. */
  def mergeFrom(m: RpbGetResp) = {
    RpbGetResp(
      `content` ++ m.`content`,
      m.`vclock`.orElse(`vclock`),
      m.`unchanged`.orElse(`unchanged`)
    )
  }

  // Builder-API shims: this immutable message acts as its own builder.
  def getDefaultInstanceForType = RpbGetResp.defaultInstance
  def clear = getDefaultInstanceForType
  def isInitialized = true
  def build = this
  def buildPartial = this
  def newBuilderForType = this
  def toBuilder = this
}
+
/** Companion of RpbGetResp: shared default instance and protobuf field numbers. */
object RpbGetResp {
  // @BeanProperty generates a Java-style getDefaultInstance() accessor.
  @reflect.BeanProperty val defaultInstance = new RpbGetResp()

  val CONTENT_FIELD_NUMBER = 1
  val VCLOCK_FIELD_NUMBER = 2
  val UNCHANGED_FIELD_NUMBER = 3

}
/** Generated, immutable representation of the Riak `RpbPutReq` protobuf
  * message: required `bucket` and `content`, plus optional key, vclock and
  * the write-quorum / conditional-put flags. Acts as its own builder.
  */
final case class RpbPutReq (
  `bucket`: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY,
  `key`: Option[com.google.protobuf.ByteString] = None,
  `vclock`: Option[com.google.protobuf.ByteString] = None,
  `content`: RpbContent = RpbContent.defaultInstance,
  `w`: Option[Int] = None,
  `dw`: Option[Int] = None,
  `returnBody`: Option[Boolean] = None,
  `pw`: Option[Int] = None,
  `ifNotModified`: Option[Boolean] = None,
  `ifNoneMatch`: Option[Boolean] = None,
  `returnHead`: Option[Boolean] = None
) extends com.google.protobuf.GeneratedMessageLite
  with Message[RpbPutReq] {

  // Functional "setters": each returns an updated copy. Raw values are put
  // into Option fields — presumably via an implicit T => Option[T]; confirm.
  def setKey(_f: com.google.protobuf.ByteString) = copy(`key` = _f)
  def setVclock(_f: com.google.protobuf.ByteString) = copy(`vclock` = _f)
  def setW(_f: Int) = copy(`w` = _f)
  def setDw(_f: Int) = copy(`dw` = _f)
  def setReturnBody(_f: Boolean) = copy(`returnBody` = _f)
  def setPw(_f: Int) = copy(`pw` = _f)
  def setIfNotModified(_f: Boolean) = copy(`ifNotModified` = _f)
  def setIfNoneMatch(_f: Boolean) = copy(`ifNoneMatch` = _f)
  def setReturnHead(_f: Boolean) = copy(`returnHead` = _f)

  // Reset individual fields to their defaults, returning a copy.
  def clearBucket = copy(`bucket` = com.google.protobuf.ByteString.EMPTY)
  def clearKey = copy(`key` = None)
  def clearVclock = copy(`vclock` = None)
  def clearContent = copy(`content` = RpbContent.defaultInstance)
  def clearW = copy(`w` = None)
  def clearDw = copy(`dw` = None)
  def clearReturnBody = copy(`returnBody` = None)
  def clearPw = copy(`pw` = None)
  def clearIfNotModified = copy(`ifNotModified` = None)
  def clearIfNoneMatch = copy(`ifNoneMatch` = None)
  def clearReturnHead = copy(`returnHead` = None)

  // Serializes the set fields in ascending field-number order.
  def writeTo(output: com.google.protobuf.CodedOutputStream) {
    output.writeBytes(1, `bucket`)
    if (`key`.isDefined) output.writeBytes(2, `key`.get)
    if (`vclock`.isDefined) output.writeBytes(3, `vclock`.get)
    output.writeMessage(4, `content`)
    if (`w`.isDefined) output.writeUInt32(5, `w`.get)
    if (`dw`.isDefined) output.writeUInt32(6, `dw`.get)
    if (`returnBody`.isDefined) output.writeBool(7, `returnBody`.get)
    if (`pw`.isDefined) output.writeUInt32(8, `pw`.get)
    if (`ifNotModified`.isDefined) output.writeBool(9, `ifNotModified`.get)
    if (`ifNoneMatch`.isDefined) output.writeBool(10, `ifNoneMatch`.get)
    if (`returnHead`.isDefined) output.writeBool(11, `returnHead`.get)
  }

  // Serialized size in bytes; lazy, computed once and memoized.
  lazy val getSerializedSize = {
    import com.google.protobuf.CodedOutputStream._
    var size = 0
    size += computeBytesSize(1, `bucket`)
    if (`key`.isDefined) size += computeBytesSize(2, `key`.get)
    if (`vclock`.isDefined) size += computeBytesSize(3, `vclock`.get)
    size += computeMessageSize(4, `content`)
    if (`w`.isDefined) size += computeUInt32Size(5, `w`.get)
    if (`dw`.isDefined) size += computeUInt32Size(6, `dw`.get)
    if (`returnBody`.isDefined) size += computeBoolSize(7, `returnBody`.get)
    if (`pw`.isDefined) size += computeUInt32Size(8, `pw`.get)
    if (`ifNotModified`.isDefined) size += computeBoolSize(9, `ifNotModified`.get)
    if (`ifNoneMatch`.isDefined) size += computeBoolSize(10, `ifNoneMatch`.get)
    if (`returnHead`.isDefined) size += computeBoolSize(11, `returnHead`.get)

    size
  }

  /** Reads fields from the wire and merges them into this message.
    * Case values are protobuf tags: (fieldNumber << 3) | wireType; 0 = end.
    * NOTE(review): required fields (`bucket`, `content`) restart from their
    * defaults while optional fields are seeded from the current instance —
    * presumably intended by the generator; confirm.
    */
  def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbPutReq = {
    import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
    var __bucket: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY
    var __key: Option[com.google.protobuf.ByteString] = `key`
    var __vclock: Option[com.google.protobuf.ByteString] = `vclock`
    var __content: RpbContent = RpbContent.defaultInstance
    var __w: Option[Int] = `w`
    var __dw: Option[Int] = `dw`
    var __returnBody: Option[Boolean] = `returnBody`
    var __pw: Option[Int] = `pw`
    var __ifNotModified: Option[Boolean] = `ifNotModified`
    var __ifNoneMatch: Option[Boolean] = `ifNoneMatch`
    var __returnHead: Option[Boolean] = `returnHead`

    def __newMerged = RpbPutReq(
      __bucket,
      __key,
      __vclock,
      __content,
      __w,
      __dw,
      __returnBody,
      __pw,
      __ifNotModified,
      __ifNoneMatch,
      __returnHead
    )
    while (true) in.readTag match {
      case 0 => return __newMerged
      case 10 => __bucket = in.readBytes()
      case 18 => __key = in.readBytes()
      case 26 => __vclock = in.readBytes()
      case 34 => __content = readMessage[RpbContent](in, __content, _emptyRegistry)
      case 40 => __w = in.readUInt32()
      case 48 => __dw = in.readUInt32()
      case 56 => __returnBody = in.readBool()
      case 64 => __pw = in.readUInt32()
      case 72 => __ifNotModified = in.readBool()
      case 80 => __ifNoneMatch = in.readBool()
      case 88 => __returnHead = in.readBool()
      case default => if (!in.skipField(default)) return __newMerged
    }
    null // unreachable: the loop above only exits via the `return`s
  }

  /** Protobuf merge: required fields taken from `m`; optionals prefer `m`. */
  def mergeFrom(m: RpbPutReq) = {
    RpbPutReq(
      m.`bucket`,
      m.`key`.orElse(`key`),
      m.`vclock`.orElse(`vclock`),
      m.`content`,
      m.`w`.orElse(`w`),
      m.`dw`.orElse(`dw`),
      m.`returnBody`.orElse(`returnBody`),
      m.`pw`.orElse(`pw`),
      m.`ifNotModified`.orElse(`ifNotModified`),
      m.`ifNoneMatch`.orElse(`ifNoneMatch`),
      m.`returnHead`.orElse(`returnHead`)
    )
  }

  // Builder-API shims: this immutable message acts as its own builder.
  def getDefaultInstanceForType = RpbPutReq.defaultInstance
  def clear = getDefaultInstanceForType
  def isInitialized = true
  def build = this
  def buildPartial = this
  def newBuilderForType = this
  def toBuilder = this
}
+
/** Companion of RpbPutReq: shared default instance and protobuf field numbers. */
object RpbPutReq {
  // @BeanProperty generates a Java-style getDefaultInstance() accessor.
  @reflect.BeanProperty val defaultInstance = new RpbPutReq()

  val BUCKET_FIELD_NUMBER = 1
  val KEY_FIELD_NUMBER = 2
  val VCLOCK_FIELD_NUMBER = 3
  val CONTENT_FIELD_NUMBER = 4
  val W_FIELD_NUMBER = 5
  val DW_FIELD_NUMBER = 6
  val RETURN_BODY_FIELD_NUMBER = 7
  val PW_FIELD_NUMBER = 8
  val IF_NOT_MODIFIED_FIELD_NUMBER = 9
  val IF_NONE_MATCH_FIELD_NUMBER = 10
  val RETURN_HEAD_FIELD_NUMBER = 11

}
/** Generated, immutable representation of the Riak `RpbPutResp` protobuf
  * message: repeated `content` plus optional `vclock` and `key`.
  * Acts as its own builder.
  */
final case class RpbPutResp (
  `content`: Vector[RpbContent] = Vector.empty[RpbContent],
  `vclock`: Option[com.google.protobuf.ByteString] = None,
  `key`: Option[com.google.protobuf.ByteString] = None
) extends com.google.protobuf.GeneratedMessageLite
  with Message[RpbPutResp] {

  // Functional "setters": each returns an updated copy. Raw values go into
  // Option fields — presumably via an implicit T => Option[T]; confirm.
  def setContent(_i: Int, _v: RpbContent) = copy(`content` = `content`.updated(_i, _v))
  def addContent(_f: RpbContent) = copy(`content` = `content` :+ _f)
  def addAllContent(_f: RpbContent*) = copy(`content` = `content` ++ _f)
  def addAllContent(_f: TraversableOnce[RpbContent]) = copy(`content` = `content` ++ _f)
  def setVclock(_f: com.google.protobuf.ByteString) = copy(`vclock` = _f)
  def setKey(_f: com.google.protobuf.ByteString) = copy(`key` = _f)

  // Reset individual fields to their defaults, returning a copy.
  def clearContent = copy(`content` = Vector.empty[RpbContent])
  def clearVclock = copy(`vclock` = None)
  def clearKey = copy(`key` = None)

  // Serializes the set fields in ascending field-number order.
  def writeTo(output: com.google.protobuf.CodedOutputStream) {
    for (_v <- `content`) output.writeMessage(1, _v)
    if (`vclock`.isDefined) output.writeBytes(2, `vclock`.get)
    if (`key`.isDefined) output.writeBytes(3, `key`.get)
  }

  // Serialized size in bytes; lazy, computed once and memoized.
  lazy val getSerializedSize = {
    import com.google.protobuf.CodedOutputStream._
    var size = 0
    for (_v <- `content`) size += computeMessageSize(1, _v)
    if (`vclock`.isDefined) size += computeBytesSize(2, `vclock`.get)
    if (`key`.isDefined) size += computeBytesSize(3, `key`.get)

    size
  }

  /** Reads fields from the wire and merges them into this message.
    * Case values are protobuf tags: (fieldNumber << 3) | wireType; 0 = end.
    */
  def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbPutResp = {
    import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
    val __content: collection.mutable.Buffer[RpbContent] = `content`.toBuffer
    var __vclock: Option[com.google.protobuf.ByteString] = `vclock`
    var __key: Option[com.google.protobuf.ByteString] = `key`

    def __newMerged = RpbPutResp(
      Vector(__content: _*),
      __vclock,
      __key
    )
    while (true) in.readTag match {
      case 0 => return __newMerged
      case 10 => __content += readMessage[RpbContent](in, RpbContent.defaultInstance, _emptyRegistry)
      case 18 => __vclock = in.readBytes()
      case 26 => __key = in.readBytes()
      case default => if (!in.skipField(default)) return __newMerged
    }
    null // unreachable: the loop above only exits via the `return`s
  }

  /** Protobuf merge: repeated fields concatenate; optional fields prefer `m`. */
  def mergeFrom(m: RpbPutResp) = {
    RpbPutResp(
      `content` ++ m.`content`,
      m.`vclock`.orElse(`vclock`),
      m.`key`.orElse(`key`)
    )
  }

  // Builder-API shims: this immutable message acts as its own builder.
  def getDefaultInstanceForType = RpbPutResp.defaultInstance
  def clear = getDefaultInstanceForType
  def isInitialized = true
  def build = this
  def buildPartial = this
  def newBuilderForType = this
  def toBuilder = this
}
+
/** Companion of RpbPutResp: shared default instance and protobuf field numbers. */
object RpbPutResp {
  // @BeanProperty generates a Java-style getDefaultInstance() accessor.
  @reflect.BeanProperty val defaultInstance = new RpbPutResp()

  val CONTENT_FIELD_NUMBER = 1
  val VCLOCK_FIELD_NUMBER = 2
  val KEY_FIELD_NUMBER = 3

}
/** Generated, immutable representation of the Riak `RpbDelReq` protobuf
  * message: required `bucket`/`key`, plus optional vclock and the various
  * delete-quorum parameters. Acts as its own builder.
  */
final case class RpbDelReq (
  `bucket`: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY,
  `key`: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY,
  `rw`: Option[Int] = None,
  `vclock`: Option[com.google.protobuf.ByteString] = None,
  `r`: Option[Int] = None,
  `w`: Option[Int] = None,
  `pr`: Option[Int] = None,
  `pw`: Option[Int] = None,
  `dw`: Option[Int] = None
) extends com.google.protobuf.GeneratedMessageLite
  with Message[RpbDelReq] {

  // Functional "setters": each returns an updated copy. Raw values go into
  // Option fields — presumably via an implicit T => Option[T]; confirm.
  def setRw(_f: Int) = copy(`rw` = _f)
  def setVclock(_f: com.google.protobuf.ByteString) = copy(`vclock` = _f)
  def setR(_f: Int) = copy(`r` = _f)
  def setW(_f: Int) = copy(`w` = _f)
  def setPr(_f: Int) = copy(`pr` = _f)
  def setPw(_f: Int) = copy(`pw` = _f)
  def setDw(_f: Int) = copy(`dw` = _f)

  // Reset individual fields to their defaults, returning a copy.
  def clearBucket = copy(`bucket` = com.google.protobuf.ByteString.EMPTY)
  def clearKey = copy(`key` = com.google.protobuf.ByteString.EMPTY)
  def clearRw = copy(`rw` = None)
  def clearVclock = copy(`vclock` = None)
  def clearR = copy(`r` = None)
  def clearW = copy(`w` = None)
  def clearPr = copy(`pr` = None)
  def clearPw = copy(`pw` = None)
  def clearDw = copy(`dw` = None)

  // Serializes the set fields in ascending field-number order.
  def writeTo(output: com.google.protobuf.CodedOutputStream) {
    output.writeBytes(1, `bucket`)
    output.writeBytes(2, `key`)
    if (`rw`.isDefined) output.writeUInt32(3, `rw`.get)
    if (`vclock`.isDefined) output.writeBytes(4, `vclock`.get)
    if (`r`.isDefined) output.writeUInt32(5, `r`.get)
    if (`w`.isDefined) output.writeUInt32(6, `w`.get)
    if (`pr`.isDefined) output.writeUInt32(7, `pr`.get)
    if (`pw`.isDefined) output.writeUInt32(8, `pw`.get)
    if (`dw`.isDefined) output.writeUInt32(9, `dw`.get)
  }

  // Serialized size in bytes; lazy, computed once and memoized.
  lazy val getSerializedSize = {
    import com.google.protobuf.CodedOutputStream._
    var size = 0
    size += computeBytesSize(1, `bucket`)
    size += computeBytesSize(2, `key`)
    if (`rw`.isDefined) size += computeUInt32Size(3, `rw`.get)
    if (`vclock`.isDefined) size += computeBytesSize(4, `vclock`.get)
    if (`r`.isDefined) size += computeUInt32Size(5, `r`.get)
    if (`w`.isDefined) size += computeUInt32Size(6, `w`.get)
    if (`pr`.isDefined) size += computeUInt32Size(7, `pr`.get)
    if (`pw`.isDefined) size += computeUInt32Size(8, `pw`.get)
    if (`dw`.isDefined) size += computeUInt32Size(9, `dw`.get)

    size
  }

  /** Reads fields from the wire and merges them into this message.
    * Case values are protobuf tags: (fieldNumber << 3) | wireType; 0 = end.
    * NOTE(review): required fields restart from defaults while optionals are
    * seeded from the current instance — presumably generator-intended; confirm.
    */
  def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbDelReq = {
    import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
    var __bucket: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY
    var __key: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY
    var __rw: Option[Int] = `rw`
    var __vclock: Option[com.google.protobuf.ByteString] = `vclock`
    var __r: Option[Int] = `r`
    var __w: Option[Int] = `w`
    var __pr: Option[Int] = `pr`
    var __pw: Option[Int] = `pw`
    var __dw: Option[Int] = `dw`

    def __newMerged = RpbDelReq(
      __bucket,
      __key,
      __rw,
      __vclock,
      __r,
      __w,
      __pr,
      __pw,
      __dw
    )
    while (true) in.readTag match {
      case 0 => return __newMerged
      case 10 => __bucket = in.readBytes()
      case 18 => __key = in.readBytes()
      case 24 => __rw = in.readUInt32()
      case 34 => __vclock = in.readBytes()
      case 40 => __r = in.readUInt32()
      case 48 => __w = in.readUInt32()
      case 56 => __pr = in.readUInt32()
      case 64 => __pw = in.readUInt32()
      case 72 => __dw = in.readUInt32()
      case default => if (!in.skipField(default)) return __newMerged
    }
    null // unreachable: the loop above only exits via the `return`s
  }

  /** Protobuf merge: required fields taken from `m`; optionals prefer `m`. */
  def mergeFrom(m: RpbDelReq) = {
    RpbDelReq(
      m.`bucket`,
      m.`key`,
      m.`rw`.orElse(`rw`),
      m.`vclock`.orElse(`vclock`),
      m.`r`.orElse(`r`),
      m.`w`.orElse(`w`),
      m.`pr`.orElse(`pr`),
      m.`pw`.orElse(`pw`),
      m.`dw`.orElse(`dw`)
    )
  }

  // Builder-API shims: this immutable message acts as its own builder.
  def getDefaultInstanceForType = RpbDelReq.defaultInstance
  def clear = getDefaultInstanceForType
  def isInitialized = true
  def build = this
  def buildPartial = this
  def newBuilderForType = this
  def toBuilder = this
}
+
/** Companion of RpbDelReq: shared default instance and protobuf field numbers. */
object RpbDelReq {
  // @BeanProperty generates a Java-style getDefaultInstance() accessor.
  @reflect.BeanProperty val defaultInstance = new RpbDelReq()

  val BUCKET_FIELD_NUMBER = 1
  val KEY_FIELD_NUMBER = 2
  val RW_FIELD_NUMBER = 3
  val VCLOCK_FIELD_NUMBER = 4
  val R_FIELD_NUMBER = 5
  val W_FIELD_NUMBER = 6
  val PR_FIELD_NUMBER = 7
  val PW_FIELD_NUMBER = 8
  val DW_FIELD_NUMBER = 9

}
/** Generated, immutable representation of the Riak `RpbListBucketsResp`
  * protobuf message: a single repeated `buckets` field. Acts as its own builder.
  */
final case class RpbListBucketsResp (
  `buckets`: Vector[com.google.protobuf.ByteString] = Vector.empty[com.google.protobuf.ByteString]
) extends com.google.protobuf.GeneratedMessageLite
  with Message[RpbListBucketsResp] {

  // Functional "setters": each returns an updated copy (fields are immutable).
  def setBuckets(_i: Int, _v: com.google.protobuf.ByteString) = copy(`buckets` = `buckets`.updated(_i, _v))
  def addBuckets(_f: com.google.protobuf.ByteString) = copy(`buckets` = `buckets` :+ _f)
  def addAllBuckets(_f: com.google.protobuf.ByteString*) = copy(`buckets` = `buckets` ++ _f)
  def addAllBuckets(_f: TraversableOnce[com.google.protobuf.ByteString]) = copy(`buckets` = `buckets` ++ _f)

  // Reset to the default (empty) list, returning a copy.
  def clearBuckets = copy(`buckets` = Vector.empty[com.google.protobuf.ByteString])

  // Serializes each repeated element under field number 1.
  def writeTo(output: com.google.protobuf.CodedOutputStream) {
    for (_v <- `buckets`) output.writeBytes(1, _v)
  }

  // Serialized size in bytes; lazy, computed once and memoized.
  lazy val getSerializedSize = {
    import com.google.protobuf.CodedOutputStream._
    var size = 0
    for (_v <- `buckets`) size += computeBytesSize(1, _v)

    size
  }

  /** Reads fields from the wire and merges them into this message.
    * Case values are protobuf tags: (fieldNumber << 3) | wireType; 0 = end.
    */
  def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbListBucketsResp = {
    import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
    val __buckets: collection.mutable.Buffer[com.google.protobuf.ByteString] = `buckets`.toBuffer

    def __newMerged = RpbListBucketsResp(
      Vector(__buckets: _*)
    )
    while (true) in.readTag match {
      case 0 => return __newMerged
      case 10 => __buckets += in.readBytes()
      case default => if (!in.skipField(default)) return __newMerged
    }
    null // unreachable: the loop above only exits via the `return`s
  }

  /** Protobuf merge: repeated field concatenates. */
  def mergeFrom(m: RpbListBucketsResp) = {
    RpbListBucketsResp(
      `buckets` ++ m.`buckets`
    )
  }

  // Builder-API shims: this immutable message acts as its own builder.
  def getDefaultInstanceForType = RpbListBucketsResp.defaultInstance
  def clear = getDefaultInstanceForType
  def isInitialized = true
  def build = this
  def buildPartial = this
  def newBuilderForType = this
  def toBuilder = this
}
+
/** Companion of RpbListBucketsResp: default instance and field numbers. */
object RpbListBucketsResp {
  // @BeanProperty generates a Java-style getDefaultInstance() accessor.
  @reflect.BeanProperty val defaultInstance = new RpbListBucketsResp()

  val BUCKETS_FIELD_NUMBER = 1

}
/** Generated, immutable representation of the Riak `RpbListKeysReq` protobuf
  * message: a single required `bucket` field. Acts as its own builder.
  */
final case class RpbListKeysReq (
  `bucket`: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY
) extends com.google.protobuf.GeneratedMessageLite
  with Message[RpbListKeysReq] {

  // Reset `bucket` to its default, returning a copy.
  def clearBucket = copy(`bucket` = com.google.protobuf.ByteString.EMPTY)

  // Serializes `bucket` under field number 1.
  def writeTo(output: com.google.protobuf.CodedOutputStream) {
    output.writeBytes(1, `bucket`)
  }

  // Serialized size in bytes; lazy, computed once and memoized.
  lazy val getSerializedSize = {
    import com.google.protobuf.CodedOutputStream._
    var size = 0
    size += computeBytesSize(1, `bucket`)

    size
  }

  /** Reads fields from the wire and merges them into this message.
    * Case values are protobuf tags: (fieldNumber << 3) | wireType; 0 = end.
    */
  def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbListKeysReq = {
    import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
    var __bucket: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY

    def __newMerged = RpbListKeysReq(
      __bucket
    )
    while (true) in.readTag match {
      case 0 => return __newMerged
      case 10 => __bucket = in.readBytes()
      case default => if (!in.skipField(default)) return __newMerged
    }
    null // unreachable: the loop above only exits via the `return`s
  }

  /** Protobuf merge: the required field is taken from `m`. */
  def mergeFrom(m: RpbListKeysReq) = {
    RpbListKeysReq(
      m.`bucket`
    )
  }

  // Builder-API shims: this immutable message acts as its own builder.
  def getDefaultInstanceForType = RpbListKeysReq.defaultInstance
  def clear = getDefaultInstanceForType
  def isInitialized = true
  def build = this
  def buildPartial = this
  def newBuilderForType = this
  def toBuilder = this
}
+
/** Companion of RpbListKeysReq: default instance and field numbers. */
object RpbListKeysReq {
  // @BeanProperty generates a Java-style getDefaultInstance() accessor.
  @reflect.BeanProperty val defaultInstance = new RpbListKeysReq()

  val BUCKET_FIELD_NUMBER = 1

}
/** Generated, immutable representation of the Riak `RpbListKeysResp` protobuf
  * message: repeated `keys` plus an optional `done` flag (streamed responses).
  * Acts as its own builder.
  */
final case class RpbListKeysResp (
  `keys`: Vector[com.google.protobuf.ByteString] = Vector.empty[com.google.protobuf.ByteString],
  `done`: Option[Boolean] = None
) extends com.google.protobuf.GeneratedMessageLite
  with Message[RpbListKeysResp] {

  // Functional "setters": each returns an updated copy. `setDone` puts a raw
  // Boolean into an Option field — presumably via an implicit lift; confirm.
  def setKeys(_i: Int, _v: com.google.protobuf.ByteString) = copy(`keys` = `keys`.updated(_i, _v))
  def addKeys(_f: com.google.protobuf.ByteString) = copy(`keys` = `keys` :+ _f)
  def addAllKeys(_f: com.google.protobuf.ByteString*) = copy(`keys` = `keys` ++ _f)
  def addAllKeys(_f: TraversableOnce[com.google.protobuf.ByteString]) = copy(`keys` = `keys` ++ _f)
  def setDone(_f: Boolean) = copy(`done` = _f)

  // Reset individual fields to their defaults, returning a copy.
  def clearKeys = copy(`keys` = Vector.empty[com.google.protobuf.ByteString])
  def clearDone = copy(`done` = None)

  // Serializes the set fields in ascending field-number order.
  def writeTo(output: com.google.protobuf.CodedOutputStream) {
    for (_v <- `keys`) output.writeBytes(1, _v)
    if (`done`.isDefined) output.writeBool(2, `done`.get)
  }

  // Serialized size in bytes; lazy, computed once and memoized.
  lazy val getSerializedSize = {
    import com.google.protobuf.CodedOutputStream._
    var size = 0
    for (_v <- `keys`) size += computeBytesSize(1, _v)
    if (`done`.isDefined) size += computeBoolSize(2, `done`.get)

    size
  }

  /** Reads fields from the wire and merges them into this message.
    * Case values are protobuf tags: (fieldNumber << 3) | wireType; 0 = end.
    */
  def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbListKeysResp = {
    import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
    val __keys: collection.mutable.Buffer[com.google.protobuf.ByteString] = `keys`.toBuffer
    var __done: Option[Boolean] = `done`

    def __newMerged = RpbListKeysResp(
      Vector(__keys: _*),
      __done
    )
    while (true) in.readTag match {
      case 0 => return __newMerged
      case 10 => __keys += in.readBytes()
      case 16 => __done = in.readBool()
      case default => if (!in.skipField(default)) return __newMerged
    }
    null // unreachable: the loop above only exits via the `return`s
  }

  /** Protobuf merge: repeated field concatenates; optional prefers `m`. */
  def mergeFrom(m: RpbListKeysResp) = {
    RpbListKeysResp(
      `keys` ++ m.`keys`,
      m.`done`.orElse(`done`)
    )
  }

  // Builder-API shims: this immutable message acts as its own builder.
  def getDefaultInstanceForType = RpbListKeysResp.defaultInstance
  def clear = getDefaultInstanceForType
  def isInitialized = true
  def build = this
  def buildPartial = this
  def newBuilderForType = this
  def toBuilder = this
}
+
/** Companion of RpbListKeysResp: default instance and field numbers. */
object RpbListKeysResp {
  // @BeanProperty generates a Java-style getDefaultInstance() accessor.
  @reflect.BeanProperty val defaultInstance = new RpbListKeysResp()

  val KEYS_FIELD_NUMBER = 1
  val DONE_FIELD_NUMBER = 2

}
/** Generated, immutable representation of the Riak `RpbGetBucketReq` protobuf
  * message: a single required `bucket` field. Acts as its own builder.
  */
final case class RpbGetBucketReq (
  `bucket`: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY
) extends com.google.protobuf.GeneratedMessageLite
  with Message[RpbGetBucketReq] {

  // Reset `bucket` to its default, returning a copy.
  def clearBucket = copy(`bucket` = com.google.protobuf.ByteString.EMPTY)

  // Serializes `bucket` under field number 1.
  def writeTo(output: com.google.protobuf.CodedOutputStream) {
    output.writeBytes(1, `bucket`)
  }

  // Serialized size in bytes; lazy, computed once and memoized.
  lazy val getSerializedSize = {
    import com.google.protobuf.CodedOutputStream._
    var size = 0
    size += computeBytesSize(1, `bucket`)

    size
  }

  /** Reads fields from the wire and merges them into this message.
    * Case values are protobuf tags: (fieldNumber << 3) | wireType; 0 = end.
    */
  def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbGetBucketReq = {
    import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
    var __bucket: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY

    def __newMerged = RpbGetBucketReq(
      __bucket
    )
    while (true) in.readTag match {
      case 0 => return __newMerged
      case 10 => __bucket = in.readBytes()
      case default => if (!in.skipField(default)) return __newMerged
    }
    null // unreachable: the loop above only exits via the `return`s
  }

  /** Protobuf merge: the required field is taken from `m`. */
  def mergeFrom(m: RpbGetBucketReq) = {
    RpbGetBucketReq(
      m.`bucket`
    )
  }

  // Builder-API shims: this immutable message acts as its own builder.
  def getDefaultInstanceForType = RpbGetBucketReq.defaultInstance
  def clear = getDefaultInstanceForType
  def isInitialized = true
  def build = this
  def buildPartial = this
  def newBuilderForType = this
  def toBuilder = this
}
+
/** Companion of RpbGetBucketReq: default instance and field numbers. */
object RpbGetBucketReq {
  // @BeanProperty generates a Java-style getDefaultInstance() accessor.
  @reflect.BeanProperty val defaultInstance = new RpbGetBucketReq()

  val BUCKET_FIELD_NUMBER = 1

}
/** Generated, immutable representation of the Riak `RpbGetBucketResp`
  * protobuf message: a single required `props` sub-message. Acts as its own
  * builder.
  */
final case class RpbGetBucketResp (
  `props`: RpbBucketProps = RpbBucketProps.defaultInstance
) extends com.google.protobuf.GeneratedMessageLite
  with Message[RpbGetBucketResp] {

  // Reset `props` to its default, returning a copy.
  def clearProps = copy(`props` = RpbBucketProps.defaultInstance)

  // Serializes `props` under field number 1.
  def writeTo(output: com.google.protobuf.CodedOutputStream) {
    output.writeMessage(1, `props`)
  }

  // Serialized size in bytes; lazy, computed once and memoized.
  lazy val getSerializedSize = {
    import com.google.protobuf.CodedOutputStream._
    var size = 0
    size += computeMessageSize(1, `props`)

    size
  }

  /** Reads fields from the wire and merges them into this message.
    * Case values are protobuf tags: (fieldNumber << 3) | wireType; 0 = end.
    */
  def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbGetBucketResp = {
    import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
    var __props: RpbBucketProps = RpbBucketProps.defaultInstance

    def __newMerged = RpbGetBucketResp(
      __props
    )
    while (true) in.readTag match {
      case 0 => return __newMerged
      case 10 => __props = readMessage[RpbBucketProps](in, __props, _emptyRegistry)
      case default => if (!in.skipField(default)) return __newMerged
    }
    null // unreachable: the loop above only exits via the `return`s
  }

  /** Protobuf merge: the required field is taken from `m`. */
  def mergeFrom(m: RpbGetBucketResp) = {
    RpbGetBucketResp(
      m.`props`
    )
  }

  // Builder-API shims: this immutable message acts as its own builder.
  def getDefaultInstanceForType = RpbGetBucketResp.defaultInstance
  def clear = getDefaultInstanceForType
  def isInitialized = true
  def build = this
  def buildPartial = this
  def newBuilderForType = this
  def toBuilder = this
}
+
/** Companion of RpbGetBucketResp: default instance and field numbers. */
object RpbGetBucketResp {
  // @BeanProperty generates a Java-style getDefaultInstance() accessor.
  @reflect.BeanProperty val defaultInstance = new RpbGetBucketResp()

  val PROPS_FIELD_NUMBER = 1

}
/** Generated, immutable representation of the Riak `RpbSetBucketReq` protobuf
  * message: required `bucket` plus the `props` sub-message to apply. Acts as
  * its own builder.
  */
final case class RpbSetBucketReq (
  `bucket`: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY,
  `props`: RpbBucketProps = RpbBucketProps.defaultInstance
) extends com.google.protobuf.GeneratedMessageLite
  with Message[RpbSetBucketReq] {

  // Reset individual fields to their defaults, returning a copy.
  def clearBucket = copy(`bucket` = com.google.protobuf.ByteString.EMPTY)
  def clearProps = copy(`props` = RpbBucketProps.defaultInstance)

  // Serializes the fields in ascending field-number order.
  def writeTo(output: com.google.protobuf.CodedOutputStream) {
    output.writeBytes(1, `bucket`)
    output.writeMessage(2, `props`)
  }

  // Serialized size in bytes; lazy, computed once and memoized.
  lazy val getSerializedSize = {
    import com.google.protobuf.CodedOutputStream._
    var size = 0
    size += computeBytesSize(1, `bucket`)
    size += computeMessageSize(2, `props`)

    size
  }

  /** Reads fields from the wire and merges them into this message.
    * Case values are protobuf tags: (fieldNumber << 3) | wireType; 0 = end.
    */
  def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbSetBucketReq = {
    import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
    var __bucket: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY
    var __props: RpbBucketProps = RpbBucketProps.defaultInstance

    def __newMerged = RpbSetBucketReq(
      __bucket,
      __props
    )
    while (true) in.readTag match {
      case 0 => return __newMerged
      case 10 => __bucket = in.readBytes()
      case 18 => __props = readMessage[RpbBucketProps](in, __props, _emptyRegistry)
      case default => if (!in.skipField(default)) return __newMerged
    }
    null // unreachable: the loop above only exits via the `return`s
  }

  /** Protobuf merge: required fields are taken from `m`. */
  def mergeFrom(m: RpbSetBucketReq) = {
    RpbSetBucketReq(
      m.`bucket`,
      m.`props`
    )
  }

  // Builder-API shims: this immutable message acts as its own builder.
  def getDefaultInstanceForType = RpbSetBucketReq.defaultInstance
  def clear = getDefaultInstanceForType
  def isInitialized = true
  def build = this
  def buildPartial = this
  def newBuilderForType = this
  def toBuilder = this
}
+
/** Companion of RpbSetBucketReq: default instance and field numbers. */
object RpbSetBucketReq {
  // @BeanProperty generates a Java-style getDefaultInstance() accessor.
  @reflect.BeanProperty val defaultInstance = new RpbSetBucketReq()

  val BUCKET_FIELD_NUMBER = 1
  val PROPS_FIELD_NUMBER = 2

}
/** Generated, immutable representation of the Riak `RpbMapRedReq` protobuf
  * message: a raw `request` payload plus its `contentType`. Acts as its own
  * builder.
  */
final case class RpbMapRedReq (
  `request`: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY,
  `contentType`: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY
) extends com.google.protobuf.GeneratedMessageLite
  with Message[RpbMapRedReq] {

  // Reset individual fields to their defaults, returning a copy.
  def clearRequest = copy(`request` = com.google.protobuf.ByteString.EMPTY)
  def clearContentType = copy(`contentType` = com.google.protobuf.ByteString.EMPTY)

  // Serializes the fields in ascending field-number order.
  def writeTo(output: com.google.protobuf.CodedOutputStream) {
    output.writeBytes(1, `request`)
    output.writeBytes(2, `contentType`)
  }

  // Serialized size in bytes; lazy, computed once and memoized.
  lazy val getSerializedSize = {
    import com.google.protobuf.CodedOutputStream._
    var size = 0
    size += computeBytesSize(1, `request`)
    size += computeBytesSize(2, `contentType`)

    size
  }

  /** Reads fields from the wire and merges them into this message.
    * Case values are protobuf tags: (fieldNumber << 3) | wireType; 0 = end.
    */
  def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbMapRedReq = {
    import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
    var __request: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY
    var __contentType: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY

    def __newMerged = RpbMapRedReq(
      __request,
      __contentType
    )
    while (true) in.readTag match {
      case 0 => return __newMerged
      case 10 => __request = in.readBytes()
      case 18 => __contentType = in.readBytes()
      case default => if (!in.skipField(default)) return __newMerged
    }
    null // unreachable: the loop above only exits via the `return`s
  }

  /** Protobuf merge: required fields are taken from `m`. */
  def mergeFrom(m: RpbMapRedReq) = {
    RpbMapRedReq(
      m.`request`,
      m.`contentType`
    )
  }

  // Builder-API shims: this immutable message acts as its own builder.
  def getDefaultInstanceForType = RpbMapRedReq.defaultInstance
  def clear = getDefaultInstanceForType
  def isInitialized = true
  def build = this
  def buildPartial = this
  def newBuilderForType = this
  def toBuilder = this
}
+
/** Companion of RpbMapRedReq: default instance and field numbers. */
object RpbMapRedReq {
  // @BeanProperty generates a Java-style getDefaultInstance() accessor.
  @reflect.BeanProperty val defaultInstance = new RpbMapRedReq()

  val REQUEST_FIELD_NUMBER = 1
  val CONTENT_TYPE_FIELD_NUMBER = 2

}
/** Generated, immutable representation of the Riak `RpbMapRedResp` protobuf
  * message: optional `phase`, `response` payload and `done` flag (streamed
  * map/reduce results). Acts as its own builder.
  */
final case class RpbMapRedResp (
  `phase`: Option[Int] = None,
  `response`: Option[com.google.protobuf.ByteString] = None,
  `done`: Option[Boolean] = None
) extends com.google.protobuf.GeneratedMessageLite
  with Message[RpbMapRedResp] {

  // Functional "setters": each returns an updated copy. Raw values go into
  // Option fields — presumably via an implicit T => Option[T]; confirm.
  def setPhase(_f: Int) = copy(`phase` = _f)
  def setResponse(_f: com.google.protobuf.ByteString) = copy(`response` = _f)
  def setDone(_f: Boolean) = copy(`done` = _f)

  // Reset individual fields to their defaults, returning a copy.
  def clearPhase = copy(`phase` = None)
  def clearResponse = copy(`response` = None)
  def clearDone = copy(`done` = None)

  // Serializes the set fields in ascending field-number order.
  def writeTo(output: com.google.protobuf.CodedOutputStream) {
    if (`phase`.isDefined) output.writeUInt32(1, `phase`.get)
    if (`response`.isDefined) output.writeBytes(2, `response`.get)
    if (`done`.isDefined) output.writeBool(3, `done`.get)
  }

  // Serialized size in bytes; lazy, computed once and memoized.
  lazy val getSerializedSize = {
    import com.google.protobuf.CodedOutputStream._
    var size = 0
    if (`phase`.isDefined) size += computeUInt32Size(1, `phase`.get)
    if (`response`.isDefined) size += computeBytesSize(2, `response`.get)
    if (`done`.isDefined) size += computeBoolSize(3, `done`.get)

    size
  }

  /** Reads fields from the wire and merges them into this message.
    * Case values are protobuf tags: (fieldNumber << 3) | wireType; 0 = end.
    */
  def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbMapRedResp = {
    import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
    var __phase: Option[Int] = `phase`
    var __response: Option[com.google.protobuf.ByteString] = `response`
    var __done: Option[Boolean] = `done`

    def __newMerged = RpbMapRedResp(
      __phase,
      __response,
      __done
    )
    while (true) in.readTag match {
      case 0 => return __newMerged
      case 8 => __phase = in.readUInt32()
      case 18 => __response = in.readBytes()
      case 24 => __done = in.readBool()
      case default => if (!in.skipField(default)) return __newMerged
    }
    null // unreachable: the loop above only exits via the `return`s
  }

  /** Protobuf merge: optional fields prefer `m`'s values. */
  def mergeFrom(m: RpbMapRedResp) = {
    RpbMapRedResp(
      m.`phase`.orElse(`phase`),
      m.`response`.orElse(`response`),
      m.`done`.orElse(`done`)
    )
  }

  // Builder-API shims: this immutable message acts as its own builder.
  def getDefaultInstanceForType = RpbMapRedResp.defaultInstance
  def clear = getDefaultInstanceForType
  def isInitialized = true
  def build = this
  def buildPartial = this
  def newBuilderForType = this
  def toBuilder = this
}
+
/** Companion of RpbMapRedResp: default instance and field numbers. */
object RpbMapRedResp {
  // @BeanProperty generates a Java-style getDefaultInstance() accessor.
  @reflect.BeanProperty val defaultInstance = new RpbMapRedResp()

  val PHASE_FIELD_NUMBER = 1
  val RESPONSE_FIELD_NUMBER = 2
  val DONE_FIELD_NUMBER = 3

}
+// Secondary-index query request. `bucket`, `index` and `qtype` are required
+// fields (non-Option, defaulted); `key` serves eq-queries and
+// rangeMin/rangeMax serve range-queries (see RpbIndexReq.IndexQueryType).
+final case class RpbIndexReq (
+ `bucket`: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY,
+ `index`: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY,
+ `qtype`: RpbIndexReq.IndexQueryType.EnumVal = RpbIndexReq.IndexQueryType._UNINITIALIZED,
+ `key`: Option[com.google.protobuf.ByteString] = None,
+ `rangeMin`: Option[com.google.protobuf.ByteString] = None,
+ `rangeMax`: Option[com.google.protobuf.ByteString] = None
+) extends com.google.protobuf.GeneratedMessageLite
+ with Message[RpbIndexReq] {
+
+ // Functional setters/clearers: each returns a modified copy (the case class
+ // is immutable). NOTE(review): setKey et al. assign a plain ByteString to an
+ // Option field — relies on an implicit lift from the Message trait; confirm.
+ def setKey(_f: com.google.protobuf.ByteString) = copy(`key` = _f)
+ def setRangeMin(_f: com.google.protobuf.ByteString) = copy(`rangeMin` = _f)
+ def setRangeMax(_f: com.google.protobuf.ByteString) = copy(`rangeMax` = _f)
+
+ def clearBucket = copy(`bucket` = com.google.protobuf.ByteString.EMPTY)
+ def clearIndex = copy(`index` = com.google.protobuf.ByteString.EMPTY)
+ def clearQtype = copy(`qtype` = RpbIndexReq.IndexQueryType._UNINITIALIZED)
+ def clearKey = copy(`key` = None)
+ def clearRangeMin = copy(`rangeMin` = None)
+ def clearRangeMax = copy(`rangeMax` = None)
+
+ // Serializes in field order; required fields are always written,
+ // optional fields only when defined.
+ def writeTo(output: com.google.protobuf.CodedOutputStream) {
+ output.writeBytes(1, `bucket`)
+ output.writeBytes(2, `index`)
+ output.writeEnum(3, `qtype`)
+ if (`key`.isDefined) output.writeBytes(4, `key`.get)
+ if (`rangeMin`.isDefined) output.writeBytes(5, `rangeMin`.get)
+ if (`rangeMax`.isDefined) output.writeBytes(6, `rangeMax`.get)
+ }
+
+ // Cached (lazy) wire size; mirrors writeTo field-for-field.
+ lazy val getSerializedSize = {
+ import com.google.protobuf.CodedOutputStream._
+ var size = 0
+ size += computeBytesSize(1, `bucket`)
+ size += computeBytesSize(2, `index`)
+ size += computeEnumSize(3, `qtype`)
+ if (`key`.isDefined) size += computeBytesSize(4, `key`.get)
+ if (`rangeMin`.isDefined) size += computeBytesSize(5, `rangeMin`.get)
+ if (`rangeMax`.isDefined) size += computeBytesSize(6, `rangeMax`.get)
+
+ size
+ }
+
+ // Wire decode with merge semantics. Note the asymmetry: required fields
+ // (__bucket/__index/__qtype) restart from their defaults, while optional
+ // fields are seeded from this instance's current values.
+ def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbIndexReq = {
+ import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
+ var __bucket: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY
+ var __index: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY
+ var __qtype: RpbIndexReq.IndexQueryType.EnumVal = RpbIndexReq.IndexQueryType._UNINITIALIZED
+ var __key: Option[com.google.protobuf.ByteString] = `key`
+ var __rangeMin: Option[com.google.protobuf.ByteString] = `rangeMin`
+ var __rangeMax: Option[com.google.protobuf.ByteString] = `rangeMax`
+
+ def __newMerged = RpbIndexReq(
+ __bucket,
+ __index,
+ __qtype,
+ __key,
+ __rangeMin,
+ __rangeMax
+ )
+ // Tags: (field << 3) | wireType — 10/18 bytes, 24 enum varint, 34/42/50 bytes.
+ while (true) in.readTag match {
+ case 0 => return __newMerged
+ case 10 => __bucket = in.readBytes()
+ case 18 => __index = in.readBytes()
+ case 24 => __qtype = RpbIndexReq.IndexQueryType.valueOf(in.readEnum())
+ case 34 => __key = in.readBytes()
+ case 42 => __rangeMin = in.readBytes()
+ case 50 => __rangeMax = in.readBytes()
+ case default => if (!in.skipField(default)) return __newMerged
+ }
+ // Unreachable; satisfies the declared return type.
+ null
+ }
+
+ // Message merge: required fields come from `m`; optional fields from `m`
+ // when set, otherwise from this instance.
+ def mergeFrom(m: RpbIndexReq) = {
+ RpbIndexReq(
+ m.`bucket`,
+ m.`index`,
+ m.`qtype`,
+ m.`key`.orElse(`key`),
+ m.`rangeMin`.orElse(`rangeMin`),
+ m.`rangeMax`.orElse(`rangeMax`)
+ )
+ }
+
+ // Builder-interface stubs: the case class is its own builder.
+ def getDefaultInstanceForType = RpbIndexReq.defaultInstance
+ def clear = getDefaultInstanceForType
+ def isInitialized = true
+ def build = this
+ def buildPartial = this
+ def newBuilderForType = this
+ def toBuilder = this
+}
+
+// Companion: default instance, protobuf field numbers, and the
+// IndexQueryType enum (eq = 0 exact-match query, range = 1 range query).
+object RpbIndexReq {
+ @reflect.BeanProperty val defaultInstance = new RpbIndexReq()
+
+ val BUCKET_FIELD_NUMBER = 1
+ val INDEX_FIELD_NUMBER = 2
+ val QTYPE_FIELD_NUMBER = 3
+ val KEY_FIELD_NUMBER = 4
+ val RANGE_MIN_FIELD_NUMBER = 5
+ val RANGE_MAX_FIELD_NUMBER = 6
+
+ object IndexQueryType extends Enum {
+ sealed trait EnumVal extends Value
+ // Sentinel for a not-yet-set required enum field (never sent on the wire).
+ val _UNINITIALIZED = new EnumVal { val name = "UNINITIALIZED ENUM VALUE"; val id = -1 }
+
+ val eq = new EnumVal { val name = "eq"; val id = 0 }
+ val range = new EnumVal { val name = "range"; val id = 1 }
+
+ val eq_VALUE = 0
+ val range_VALUE = 1
+
+ // Maps a wire-format enum number back to its value; unknown numbers
+ // raise UnknownEnumException rather than being silently dropped.
+ def valueOf(id: Int) = id match {
+ case 0 => eq
+ case 1 => range
+ case _default => throw new UnknownEnumException(_default)
+ }
+ val internalGetValueMap = new com.google.protobuf.Internal.EnumLiteMap[EnumVal] {
+ def findValueByNumber(id: Int): EnumVal = valueOf(id)
+ }
+ }
+
+}
+// Secondary-index query response: a repeated `keys` field holding the
+// matching object keys as raw ByteStrings.
+final case class RpbIndexResp (
+ `keys`: Vector[com.google.protobuf.ByteString] = Vector.empty[com.google.protobuf.ByteString]
+) extends com.google.protobuf.GeneratedMessageLite
+ with Message[RpbIndexResp] {
+
+ // Functional accessors for the repeated field: each returns a new copy.
+ def setKeys(_i: Int, _v: com.google.protobuf.ByteString) = copy(`keys` = `keys`.updated(_i, _v))
+ def addKeys(_f: com.google.protobuf.ByteString) = copy(`keys` = `keys` :+ _f)
+ def addAllKeys(_f: com.google.protobuf.ByteString*) = copy(`keys` = `keys` ++ _f)
+ def addAllKeys(_f: TraversableOnce[com.google.protobuf.ByteString]) = copy(`keys` = `keys` ++ _f)
+
+ def clearKeys = copy(`keys` = Vector.empty[com.google.protobuf.ByteString])
+
+ // Repeated field: one length-delimited record per element, all tagged 1.
+ def writeTo(output: com.google.protobuf.CodedOutputStream) {
+ for (_v <- `keys`) output.writeBytes(1, _v)
+ }
+
+ lazy val getSerializedSize = {
+ import com.google.protobuf.CodedOutputStream._
+ var size = 0
+ for (_v <- `keys`) size += computeBytesSize(1, _v)
+
+ size
+ }
+
+ // Wire decode with merge semantics: decoded keys are appended to the
+ // current ones (buffer is seeded from `keys`).
+ def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbIndexResp = {
+ import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
+ val __keys: collection.mutable.Buffer[com.google.protobuf.ByteString] = `keys`.toBuffer
+
+ def __newMerged = RpbIndexResp(
+ Vector(__keys: _*)
+ )
+ while (true) in.readTag match {
+ case 0 => return __newMerged
+ case 10 => __keys += in.readBytes()
+ case default => if (!in.skipField(default)) return __newMerged
+ }
+ // Unreachable; satisfies the declared return type.
+ null
+ }
+
+ // Message merge: repeated fields concatenate (this ++ m).
+ def mergeFrom(m: RpbIndexResp) = {
+ RpbIndexResp(
+ `keys` ++ m.`keys`
+ )
+ }
+
+ // Builder-interface stubs: the case class is its own builder.
+ def getDefaultInstanceForType = RpbIndexResp.defaultInstance
+ def clear = getDefaultInstanceForType
+ def isInitialized = true
+ def build = this
+ def buildPartial = this
+ def newBuilderForType = this
+ def toBuilder = this
+}
+
+// Companion: default instance and protobuf field number.
+object RpbIndexResp {
+ @reflect.BeanProperty val defaultInstance = new RpbIndexResp()
+
+ val KEYS_FIELD_NUMBER = 1
+
+}
+// A single sibling value of a Riak object: the stored bytes plus metadata
+// (content type, charset, encoding, vector-clock tag, links, last-modified
+// timestamps, user metadata, secondary indexes, tombstone flag).
+final case class RpbContent (
+ `value`: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY,
+ `contentType`: Option[com.google.protobuf.ByteString] = None,
+ `charset`: Option[com.google.protobuf.ByteString] = None,
+ `contentEncoding`: Option[com.google.protobuf.ByteString] = None,
+ `vtag`: Option[com.google.protobuf.ByteString] = None,
+ `links`: Vector[RpbLink] = Vector.empty[RpbLink],
+ `lastMod`: Option[Int] = None,
+ `lastModUsecs`: Option[Int] = None,
+ `usermeta`: Vector[RpbPair] = Vector.empty[RpbPair],
+ `indexes`: Vector[RpbPair] = Vector.empty[RpbPair],
+ `deleted`: Option[Boolean] = None
+) extends com.google.protobuf.GeneratedMessageLite
+ with Message[RpbContent] {
+
+ // Functional setters/adders: each returns a modified copy. Plain values
+ // assigned to Option fields rely on the implicit lift from the Message trait.
+ def setContentType(_f: com.google.protobuf.ByteString) = copy(`contentType` = _f)
+ def setCharset(_f: com.google.protobuf.ByteString) = copy(`charset` = _f)
+ def setContentEncoding(_f: com.google.protobuf.ByteString) = copy(`contentEncoding` = _f)
+ def setVtag(_f: com.google.protobuf.ByteString) = copy(`vtag` = _f)
+ def setLinks(_i: Int, _v: RpbLink) = copy(`links` = `links`.updated(_i, _v))
+ def addLinks(_f: RpbLink) = copy(`links` = `links` :+ _f)
+ def addAllLinks(_f: RpbLink*) = copy(`links` = `links` ++ _f)
+ def addAllLinks(_f: TraversableOnce[RpbLink]) = copy(`links` = `links` ++ _f)
+ def setLastMod(_f: Int) = copy(`lastMod` = _f)
+ def setLastModUsecs(_f: Int) = copy(`lastModUsecs` = _f)
+ def setUsermeta(_i: Int, _v: RpbPair) = copy(`usermeta` = `usermeta`.updated(_i, _v))
+ def addUsermeta(_f: RpbPair) = copy(`usermeta` = `usermeta` :+ _f)
+ def addAllUsermeta(_f: RpbPair*) = copy(`usermeta` = `usermeta` ++ _f)
+ def addAllUsermeta(_f: TraversableOnce[RpbPair]) = copy(`usermeta` = `usermeta` ++ _f)
+ def setIndexes(_i: Int, _v: RpbPair) = copy(`indexes` = `indexes`.updated(_i, _v))
+ def addIndexes(_f: RpbPair) = copy(`indexes` = `indexes` :+ _f)
+ def addAllIndexes(_f: RpbPair*) = copy(`indexes` = `indexes` ++ _f)
+ def addAllIndexes(_f: TraversableOnce[RpbPair]) = copy(`indexes` = `indexes` ++ _f)
+ def setDeleted(_f: Boolean) = copy(`deleted` = _f)
+
+ def clearValue = copy(`value` = com.google.protobuf.ByteString.EMPTY)
+ def clearContentType = copy(`contentType` = None)
+ def clearCharset = copy(`charset` = None)
+ def clearContentEncoding = copy(`contentEncoding` = None)
+ def clearVtag = copy(`vtag` = None)
+ def clearLinks = copy(`links` = Vector.empty[RpbLink])
+ def clearLastMod = copy(`lastMod` = None)
+ def clearLastModUsecs = copy(`lastModUsecs` = None)
+ def clearUsermeta = copy(`usermeta` = Vector.empty[RpbPair])
+ def clearIndexes = copy(`indexes` = Vector.empty[RpbPair])
+ def clearDeleted = copy(`deleted` = None)
+
+ // Serializes in field order: required `value` always, optional fields when
+ // defined, repeated fields one record per element.
+ def writeTo(output: com.google.protobuf.CodedOutputStream) {
+ output.writeBytes(1, `value`)
+ if (`contentType`.isDefined) output.writeBytes(2, `contentType`.get)
+ if (`charset`.isDefined) output.writeBytes(3, `charset`.get)
+ if (`contentEncoding`.isDefined) output.writeBytes(4, `contentEncoding`.get)
+ if (`vtag`.isDefined) output.writeBytes(5, `vtag`.get)
+ for (_v <- `links`) output.writeMessage(6, _v)
+ if (`lastMod`.isDefined) output.writeUInt32(7, `lastMod`.get)
+ if (`lastModUsecs`.isDefined) output.writeUInt32(8, `lastModUsecs`.get)
+ for (_v <- `usermeta`) output.writeMessage(9, _v)
+ for (_v <- `indexes`) output.writeMessage(10, _v)
+ if (`deleted`.isDefined) output.writeBool(11, `deleted`.get)
+ }
+
+ // Cached (lazy) wire size; mirrors writeTo field-for-field.
+ lazy val getSerializedSize = {
+ import com.google.protobuf.CodedOutputStream._
+ var size = 0
+ size += computeBytesSize(1, `value`)
+ if (`contentType`.isDefined) size += computeBytesSize(2, `contentType`.get)
+ if (`charset`.isDefined) size += computeBytesSize(3, `charset`.get)
+ if (`contentEncoding`.isDefined) size += computeBytesSize(4, `contentEncoding`.get)
+ if (`vtag`.isDefined) size += computeBytesSize(5, `vtag`.get)
+ for (_v <- `links`) size += computeMessageSize(6, _v)
+ if (`lastMod`.isDefined) size += computeUInt32Size(7, `lastMod`.get)
+ if (`lastModUsecs`.isDefined) size += computeUInt32Size(8, `lastModUsecs`.get)
+ for (_v <- `usermeta`) size += computeMessageSize(9, _v)
+ for (_v <- `indexes`) size += computeMessageSize(10, _v)
+ if (`deleted`.isDefined) size += computeBoolSize(11, `deleted`.get)
+
+ size
+ }
+
+ // Wire decode with merge semantics: required `value` restarts from its
+ // default, optional fields are seeded from current values, repeated fields
+ // append to current contents.
+ def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbContent = {
+ import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
+ var __value: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY
+ var __contentType: Option[com.google.protobuf.ByteString] = `contentType`
+ var __charset: Option[com.google.protobuf.ByteString] = `charset`
+ var __contentEncoding: Option[com.google.protobuf.ByteString] = `contentEncoding`
+ var __vtag: Option[com.google.protobuf.ByteString] = `vtag`
+ val __links: collection.mutable.Buffer[RpbLink] = `links`.toBuffer
+ var __lastMod: Option[Int] = `lastMod`
+ var __lastModUsecs: Option[Int] = `lastModUsecs`
+ val __usermeta: collection.mutable.Buffer[RpbPair] = `usermeta`.toBuffer
+ val __indexes: collection.mutable.Buffer[RpbPair] = `indexes`.toBuffer
+ var __deleted: Option[Boolean] = `deleted`
+
+ def __newMerged = RpbContent(
+ __value,
+ __contentType,
+ __charset,
+ __contentEncoding,
+ __vtag,
+ Vector(__links: _*),
+ __lastMod,
+ __lastModUsecs,
+ Vector(__usermeta: _*),
+ Vector(__indexes: _*),
+ __deleted
+ )
+ // Tags: (field << 3) | wireType — length-delimited for bytes/messages,
+ // varint for uint32/bool; tag 0 marks end of the message.
+ while (true) in.readTag match {
+ case 0 => return __newMerged
+ case 10 => __value = in.readBytes()
+ case 18 => __contentType = in.readBytes()
+ case 26 => __charset = in.readBytes()
+ case 34 => __contentEncoding = in.readBytes()
+ case 42 => __vtag = in.readBytes()
+ case 50 => __links += readMessage[RpbLink](in, RpbLink.defaultInstance, _emptyRegistry)
+ case 56 => __lastMod = in.readUInt32()
+ case 64 => __lastModUsecs = in.readUInt32()
+ case 74 => __usermeta += readMessage[RpbPair](in, RpbPair.defaultInstance, _emptyRegistry)
+ case 82 => __indexes += readMessage[RpbPair](in, RpbPair.defaultInstance, _emptyRegistry)
+ case 88 => __deleted = in.readBool()
+ case default => if (!in.skipField(default)) return __newMerged
+ }
+ // Unreachable; satisfies the declared return type.
+ null
+ }
+
+ // Message merge: required `value` from `m`; optional fields prefer `m`;
+ // repeated fields concatenate (this ++ m).
+ def mergeFrom(m: RpbContent) = {
+ RpbContent(
+ m.`value`,
+ m.`contentType`.orElse(`contentType`),
+ m.`charset`.orElse(`charset`),
+ m.`contentEncoding`.orElse(`contentEncoding`),
+ m.`vtag`.orElse(`vtag`),
+ `links` ++ m.`links`,
+ m.`lastMod`.orElse(`lastMod`),
+ m.`lastModUsecs`.orElse(`lastModUsecs`),
+ `usermeta` ++ m.`usermeta`,
+ `indexes` ++ m.`indexes`,
+ m.`deleted`.orElse(`deleted`)
+ )
+ }
+
+ // Builder-interface stubs: the case class is its own builder.
+ def getDefaultInstanceForType = RpbContent.defaultInstance
+ def clear = getDefaultInstanceForType
+ def isInitialized = true
+ def build = this
+ def buildPartial = this
+ def newBuilderForType = this
+ def toBuilder = this
+}
+
+// Companion: default instance and protobuf field numbers.
+object RpbContent {
+ @reflect.BeanProperty val defaultInstance = new RpbContent()
+
+ val VALUE_FIELD_NUMBER = 1
+ val CONTENT_TYPE_FIELD_NUMBER = 2
+ val CHARSET_FIELD_NUMBER = 3
+ val CONTENT_ENCODING_FIELD_NUMBER = 4
+ val VTAG_FIELD_NUMBER = 5
+ val LINKS_FIELD_NUMBER = 6
+ val LAST_MOD_FIELD_NUMBER = 7
+ val LAST_MOD_USECS_FIELD_NUMBER = 8
+ val USERMETA_FIELD_NUMBER = 9
+ val INDEXES_FIELD_NUMBER = 10
+ val DELETED_FIELD_NUMBER = 11
+
+}
+// A Riak link: an optional (bucket, key, tag) triple pointing at another
+// object. All three fields are optional bytes.
+final case class RpbLink (
+ `bucket`: Option[com.google.protobuf.ByteString] = None,
+ `key`: Option[com.google.protobuf.ByteString] = None,
+ `tag`: Option[com.google.protobuf.ByteString] = None
+) extends com.google.protobuf.GeneratedMessageLite
+ with Message[RpbLink] {
+
+ // Functional setters/clearers returning modified copies.
+ def setBucket(_f: com.google.protobuf.ByteString) = copy(`bucket` = _f)
+ def setKey(_f: com.google.protobuf.ByteString) = copy(`key` = _f)
+ def setTag(_f: com.google.protobuf.ByteString) = copy(`tag` = _f)
+
+ def clearBucket = copy(`bucket` = None)
+ def clearKey = copy(`key` = None)
+ def clearTag = copy(`tag` = None)
+
+ // Optional fields are written only when defined.
+ def writeTo(output: com.google.protobuf.CodedOutputStream) {
+ if (`bucket`.isDefined) output.writeBytes(1, `bucket`.get)
+ if (`key`.isDefined) output.writeBytes(2, `key`.get)
+ if (`tag`.isDefined) output.writeBytes(3, `tag`.get)
+ }
+
+ lazy val getSerializedSize = {
+ import com.google.protobuf.CodedOutputStream._
+ var size = 0
+ if (`bucket`.isDefined) size += computeBytesSize(1, `bucket`.get)
+ if (`key`.isDefined) size += computeBytesSize(2, `key`.get)
+ if (`tag`.isDefined) size += computeBytesSize(3, `tag`.get)
+
+ size
+ }
+
+ // Wire decode with merge semantics: accumulators seeded from current values.
+ // Tags 10/18/26 = fields 1/2/3, length-delimited.
+ def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbLink = {
+ import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
+ var __bucket: Option[com.google.protobuf.ByteString] = `bucket`
+ var __key: Option[com.google.protobuf.ByteString] = `key`
+ var __tag: Option[com.google.protobuf.ByteString] = `tag`
+
+ def __newMerged = RpbLink(
+ __bucket,
+ __key,
+ __tag
+ )
+ while (true) in.readTag match {
+ case 0 => return __newMerged
+ case 10 => __bucket = in.readBytes()
+ case 18 => __key = in.readBytes()
+ case 26 => __tag = in.readBytes()
+ case default => if (!in.skipField(default)) return __newMerged
+ }
+ // Unreachable; satisfies the declared return type.
+ null
+ }
+
+ // Message merge: fields set on `m` win, otherwise keep this instance's value.
+ def mergeFrom(m: RpbLink) = {
+ RpbLink(
+ m.`bucket`.orElse(`bucket`),
+ m.`key`.orElse(`key`),
+ m.`tag`.orElse(`tag`)
+ )
+ }
+
+ // Builder-interface stubs: the case class is its own builder.
+ def getDefaultInstanceForType = RpbLink.defaultInstance
+ def clear = getDefaultInstanceForType
+ def isInitialized = true
+ def build = this
+ def buildPartial = this
+ def newBuilderForType = this
+ def toBuilder = this
+}
+
+// Companion: default instance and protobuf field numbers.
+object RpbLink {
+ @reflect.BeanProperty val defaultInstance = new RpbLink()
+
+ val BUCKET_FIELD_NUMBER = 1
+ val KEY_FIELD_NUMBER = 2
+ val TAG_FIELD_NUMBER = 3
+
+}
+// Bucket properties: replication factor (`nVal`) and whether sibling
+// values are allowed (`allowMult`). Both optional.
+final case class RpbBucketProps (
+ `nVal`: Option[Int] = None,
+ `allowMult`: Option[Boolean] = None
+) extends com.google.protobuf.GeneratedMessageLite
+ with Message[RpbBucketProps] {
+
+ // Functional setters/clearers returning modified copies.
+ def setNVal(_f: Int) = copy(`nVal` = _f)
+ def setAllowMult(_f: Boolean) = copy(`allowMult` = _f)
+
+ def clearNVal = copy(`nVal` = None)
+ def clearAllowMult = copy(`allowMult` = None)
+
+ // Optional fields are written only when defined.
+ def writeTo(output: com.google.protobuf.CodedOutputStream) {
+ if (`nVal`.isDefined) output.writeUInt32(1, `nVal`.get)
+ if (`allowMult`.isDefined) output.writeBool(2, `allowMult`.get)
+ }
+
+ lazy val getSerializedSize = {
+ import com.google.protobuf.CodedOutputStream._
+ var size = 0
+ if (`nVal`.isDefined) size += computeUInt32Size(1, `nVal`.get)
+ if (`allowMult`.isDefined) size += computeBoolSize(2, `allowMult`.get)
+
+ size
+ }
+
+ // Wire decode with merge semantics. Tags 8/16 = fields 1/2, varint.
+ def mergeFrom(in: com.google.protobuf.CodedInputStream, extensionRegistry: com.google.protobuf.ExtensionRegistryLite): RpbBucketProps = {
+ import com.google.protobuf.ExtensionRegistryLite.{getEmptyRegistry => _emptyRegistry}
+ var __nVal: Option[Int] = `nVal`
+ var __allowMult: Option[Boolean] = `allowMult`
+
+ def __newMerged = RpbBucketProps(
+ __nVal,
+ __allowMult
+ )
+ while (true) in.readTag match {
+ case 0 => return __newMerged
+ case 8 => __nVal = in.readUInt32()
+ case 16 => __allowMult = in.readBool()
+ case default => if (!in.skipField(default)) return __newMerged
+ }
+ // Unreachable; satisfies the declared return type.
+ null
+ }
+
+ // Message merge: fields set on `m` win, otherwise keep this instance's value.
+ def mergeFrom(m: RpbBucketProps) = {
+ RpbBucketProps(
+ m.`nVal`.orElse(`nVal`),
+ m.`allowMult`.orElse(`allowMult`)
+ )
+ }
+
+ // Builder-interface stubs: the case class is its own builder.
+ def getDefaultInstanceForType = RpbBucketProps.defaultInstance
+ def clear = getDefaultInstanceForType
+ def isInitialized = true
+ def build = this
+ def buildPartial = this
+ def newBuilderForType = this
+ def toBuilder = this
+}
+
+// Companion: default instance and protobuf field numbers.
+object RpbBucketProps {
+ @reflect.BeanProperty val defaultInstance = new RpbBucketProps()
+
+ val N_VAL_FIELD_NUMBER = 1
+ val ALLOW_MULT_FIELD_NUMBER = 2
+
+}
+
+// File-level descriptor object. registerAllExtensions is intentionally a
+// no-op: this generated file declares no protobuf extensions.
+object RiakKvPB {
+ def registerAllExtensions(registry: com.google.protobuf.ExtensionRegistryLite) {
+ }
+
+}
209 src/main/scala/com/basho/riak/protobuf/RiakPB.scala
@@ -0,0 +1,209 @@
+// Generated by ScalaBuff, the Scala Protocol Buffers compiler. DO NOT EDIT!
+// source: riak.proto
+
+package com.basho.riak.protobuf
+
+final case class RpbErrorResp (
+ `errmsg`: com.google.protobuf.ByteString = com.google.protobuf.ByteString.EMPTY,
+ `errcode`: Int = 0
+) extends com.google.protobuf.GeneratedMessageLite
+ with Message[RpbErrorResp] {
+
+
+ def clearErrmsg = copy(`errmsg` = com.google.protobuf.ByteString.EMPTY)
+ def clearErrcode = copy(`errcode` = 0)
+