commit 6a43d07bae
1261 changed files with 84680 additions and 565407 deletions
@@ -97,7 +97,7 @@ RUN set -x \
 	&& rm -rf "$GOPATH"

 # Install crictl
-ENV CRICTL_COMMIT 16e6fe4d7199c5689db4630a9330e6a8a12cecd1
+ENV CRICTL_COMMIT 9ff5e8f78a4182ab8d5ba9bcccdda5f338600eab
 RUN set -x \
 	&& export GOPATH="$(mktemp -d)" \
 	&& git clone https://github.com/kubernetes-incubator/cri-tools.git "$GOPATH/src/github.com/kubernetes-incubator/cri-tools" \
@@ -162,8 +162,7 @@ func catchShutdown(gserver *grpc.Server, sserver *server.Server, hserver *http.S
 		*signalled = true
 		gserver.GracefulStop()
 		hserver.Shutdown(context.Background())
-		// TODO(runcom): enable this after https://github.com/kubernetes/kubernetes/pull/51377
-		//sserver.StopStreamServer()
+		sserver.StopStreamServer()
 		sserver.StopExitMonitor()
 		if err := sserver.Shutdown(); err != nil {
 			logrus.Warnf("error shutting down main service %v", err)
@@ -503,21 +502,18 @@ func main() {
 			}
 		}()

-		// TODO(runcom): enable this after https://github.com/kubernetes/kubernetes/pull/51377
-		//streamServerCloseCh := service.StreamingServerCloseChan()
+		streamServerCloseCh := service.StreamingServerCloseChan()
 		serverExitMonitorCh := service.ExitMonitorCloseChan()
 		select {
-		// TODO(runcom): enable this after https://github.com/kubernetes/kubernetes/pull/51377
-		//case <-streamServerCloseCh:
+		case <-streamServerCloseCh:
 		case <-serverExitMonitorCh:
 		case <-serverCloseCh:
 		}

 		service.Shutdown()

-		// TODO(runcom): enable this after https://github.com/kubernetes/kubernetes/pull/51377
-		//<-streamServerCloseCh
-		//logrus.Debug("closed stream server")
+		<-streamServerCloseCh
+		logrus.Debug("closed stream server")
 		<-serverExitMonitorCh
 		logrus.Debug("closed exit monitor")
 		<-serverCloseCh
@@ -12,7 +12,6 @@ import (
 	"github.com/kubernetes-incubator/cri-o/client"
 	"github.com/urfave/cli"
 	"golang.org/x/net/context"
-	remocommandconsts "k8s.io/apimachinery/pkg/util/remotecommand"
 	restclient "k8s.io/client-go/rest"
 	"k8s.io/client-go/tools/remotecommand"
 	pb "k8s.io/kubernetes/pkg/kubelet/apis/cri/v1alpha1/runtime"
@@ -527,13 +526,12 @@ func Exec(client pb.RuntimeServiceClient, ID string, tty bool, stdin bool, urlOn
 		return err
 	}

-	streamExec, err := remotecommand.NewExecutor(&restclient.Config{}, "GET", execURL)
+	streamExec, err := remotecommand.NewSPDYExecutor(&restclient.Config{}, "GET", execURL)
 	if err != nil {
 		return err
 	}

 	options := remotecommand.StreamOptions{
-		SupportedProtocols: remocommandconsts.SupportedStreamingProtocols,
 		Stdout: os.Stdout,
 		Stderr: os.Stderr,
 		Tty:    tty,
@@ -4,7 +4,7 @@
   git:
     repo: "https://github.com/kubernetes-incubator/cri-tools.git"
     dest: "{{ ansible_env.GOPATH }}/src/github.com/kubernetes-incubator/cri-tools"
-    version: "16e6fe4d7199c5689db4630a9330e6a8a12cecd1"
+    version: "9ff5e8f78a4182ab8d5ba9bcccdda5f338600eab"

 - name: install crictl
   command: "/usr/bin/go install github.com/kubernetes-incubator/cri-tools/cmd/crictl"
@@ -4,7 +4,7 @@
   git:
     repo: "https://github.com/runcom/kubernetes.git"
     dest: "{{ ansible_env.GOPATH }}/src/k8s.io/kubernetes"
-    version: "cri-o-node-e2e-patched"
+    version: "cri-o-patched-1.8"

 - name: install etcd
   command: "hack/install-etcd.sh"
@@ -1,5 +1,8 @@
 ---

+- name: clone build and install kubernetes
+  include: "build/kubernetes.yml"
+
 - name: enable and start CRI-O
   systemd:
     name: crio
@@ -1,5 +1,8 @@
 ---

+- name: clone build and install cri-tools
+  include: "build/cri-tools.yml"
+
 - name: Make testing output verbose so it can be converted to xunit
   lineinfile:
     dest: "{{ ansible_env.GOPATH }}/src/k8s.io/kubernetes/hack/make-rules/test.sh"
@@ -325,6 +325,8 @@ func (c *ContainerServer) LoadSandbox(id string) error {
 		return err
 	}

+	spp := m.Annotations[annotations.SeccompProfilePath]
+
 	kubeAnnotations := make(map[string]string)
 	if err = json.Unmarshal([]byte(m.Annotations[annotations.Annotations]), &kubeAnnotations); err != nil {
 		return err
@@ -339,6 +341,7 @@ func (c *ContainerServer) LoadSandbox(id string) error {
 	}
 	sb.AddHostnamePath(m.Annotations[annotations.HostnamePath])
 	sb.AddIP(ip)
+	sb.SetSeccompProfilePath(spp)

 	// We add a netNS only if we can load a permanent one.
 	// Otherwise, the sandbox will live in the host namespace.
@@ -516,6 +519,8 @@ func (c *ContainerServer) LoadContainer(id string) error {
 	}
 	ctr.SetSpec(&m)
 	ctr.SetMountPoint(m.Annotations[annotations.MountPoint])
+	spp := m.Annotations[annotations.SeccompProfilePath]
+	ctr.SetSeccompProfilePath(spp)

 	c.ContainerStateFromDisk(ctr)

@@ -157,6 +157,7 @@ type Sandbox struct {
 	stopped bool
 	// ipv4 or ipv6 cache
 	ip string
+	seccompProfilePath string
 }

 const (
@@ -205,6 +206,16 @@ func New(id, namespace, name, kubeName, logDir string, labels, annotations map[s
 	return sb, nil
 }

+// SetSeccompProfilePath sets the seccomp profile path
+func (s *Sandbox) SetSeccompProfilePath(pp string) {
+	s.seccompProfilePath = pp
+}
+
+// SeccompProfilePath returns the seccomp profile path
+func (s *Sandbox) SeccompProfilePath() string {
+	return s.seccompProfilePath
+}
+
 // AddIP stores the ip in the sandbox
 func (s *Sandbox) AddIP(ip string) {
 	s.ip = ip
@@ -48,6 +48,7 @@ type Container struct {
 	imageRef string
 	volumes []ContainerVolume
 	mountPoint string
+	seccompProfilePath string
 	spec *specs.Spec
 }

@@ -156,6 +157,16 @@ func (c *Container) ID() string {
 	return c.id
 }

+// SetSeccompProfilePath sets the seccomp profile path
+func (c *Container) SetSeccompProfilePath(pp string) {
+	c.seccompProfilePath = pp
+}
+
+// SeccompProfilePath returns the seccomp profile path
+func (c *Container) SeccompProfilePath() string {
+	return c.seccompProfilePath
+}
+
 // BundlePath returns the bundlePath of the container.
 func (c *Container) BundlePath() string {
 	return c.bundlePath
oci/oci.go (19 changes)
@@ -549,6 +549,25 @@ func (r *Runtime) ExecSync(c *Container, command []string, timeout int64) (resp
 	}, nil
 }

+// UpdateContainer updates container resources
+func (r *Runtime) UpdateContainer(c *Container, res *rspec.LinuxResources) error {
+	cmd := exec.Command(r.Path(c), "update", "--resources", "-", c.id)
+	var stdout bytes.Buffer
+	var stderr bytes.Buffer
+	cmd.Stdout = &stdout
+	cmd.Stderr = &stderr
+	jsonResources, err := json.Marshal(res)
+	if err != nil {
+		return err
+	}
+	cmd.Stdin = bytes.NewReader(jsonResources)
+
+	if err := cmd.Run(); err != nil {
+		return fmt.Errorf("updating resources for container %q failed: %v %v (%v)", c.id, stderr.String(), stdout.String(), err)
+	}
+	return nil
+}
+
 func waitContainerStop(ctx context.Context, c *Container, timeout time.Duration) error {
 	done := make(chan struct{})
 	// we could potentially re-use "done" channel to exit the loop on timeout
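For context, a minimal caller sketch for the new UpdateContainer helper; the variable names (runtime, ctr) and the limits are assumed for illustration, not taken from this commit. It only fills an OCI LinuxResources value and lets the helper pipe the JSON to the runtime's "update --resources -" invocation shown in the hunk above.

	// Sketch only: cap memory at 512 MiB and grant 256 CPU shares (values assumed).
	res := &rspec.LinuxResources{
		CPU:    &rspec.LinuxCPU{Shares: proto.Uint64(256)},
		Memory: &rspec.LinuxMemory{Limit: proto.Int64(512 * 1024 * 1024)},
	}
	if err := runtime.UpdateContainer(ctr, res); err != nil {
		logrus.Errorf("updating resources for %s: %v", ctr.ID(), err)
	}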
@@ -22,6 +22,9 @@ const (
 	// IP is the container ipv4 or ipv6 address
 	IP = "io.kubernetes.cri-o.IP"

+	// SeccompProfilePath is the node seccomp profile path
+	SeccompProfilePath = "io.kubernetes.cri-o.SeccompProfilePath"
+
 	// Image is the container image ID annotation
 	Image = "io.kubernetes.cri-o.Image"

@@ -11,7 +11,7 @@ import (
 	"path"
 	"strings"

-	"github.com/docker/docker/utils/templates"
+	"github.com/docker/docker/pkg/templates"
 	"github.com/opencontainers/runc/libcontainer/apparmor"
 )

@@ -5,6 +5,7 @@ import (
 	"errors"
 	"fmt"
 	"io"
+	"io/ioutil"
 	"os"
 	"path/filepath"
 	"regexp"
@@ -14,6 +15,7 @@ import (
 	"time"

 	"github.com/docker/distribution/reference"
+	dockermounts "github.com/docker/docker/pkg/mount"
 	"github.com/docker/docker/pkg/stringid"
 	"github.com/docker/docker/pkg/symlink"
 	"github.com/kubernetes-incubator/cri-o/libkpod"
@@ -101,7 +103,37 @@ func addOCIBindMounts(mountLabel string, containerConfig *pb.ContainerConfig, sp
 		if mount.Readonly {
 			options = []string{"ro"}
 		}
-		options = append(options, []string{"rbind", "rprivate"}...)
+		options = append(options, "rbind")
+
+		// mount propagation
+		mountInfos, err := dockermounts.GetMounts()
+		if err != nil {
+			return nil, nil, err
+		}
+		switch mount.GetPropagation() {
+		case pb.MountPropagation_PROPAGATION_PRIVATE:
+			options = append(options, "rprivate")
+			// Since default root propagation in runc is rprivate ignore
+			// setting the root propagation
+		case pb.MountPropagation_PROPAGATION_BIDIRECTIONAL:
+			if err := ensureShared(src, mountInfos); err != nil {
+				return nil, nil, err
+			}
+			options = append(options, "rshared")
+			specgen.SetLinuxRootPropagation("rshared")
+		case pb.MountPropagation_PROPAGATION_HOST_TO_CONTAINER:
+			if err := ensureSharedOrSlave(src, mountInfos); err != nil {
+				return nil, nil, err
+			}
+			options = append(options, "rslave")
+			if specgen.Spec().Linux.RootfsPropagation != "rshared" &&
+				specgen.Spec().Linux.RootfsPropagation != "rslave" {
+				specgen.SetLinuxRootPropagation("rslave")
+			}
+		default:
+			logrus.Warnf("Unknown propagation mode for hostPath %q", mount.HostPath)
+			options = append(options, "rprivate")
+		}

 		if mount.SelinuxRelabel {
 			// Need a way in kubernetes to determine if the volume is shared or private
@@ -126,6 +158,74 @@ func addOCIBindMounts(mountLabel string, containerConfig *pb.ContainerConfig, sp
 	return volumes, ociMounts, nil
 }

+// Ensure mount point on which path is mounted, is shared.
+func ensureShared(path string, mountInfos []*dockermounts.Info) error {
+	sourceMount, optionalOpts, err := getSourceMount(path, mountInfos)
+	if err != nil {
+		return err
+	}
+
+	// Make sure source mount point is shared.
+	optsSplit := strings.Split(optionalOpts, " ")
+	for _, opt := range optsSplit {
+		if strings.HasPrefix(opt, "shared:") {
+			return nil
+		}
+	}
+
+	return fmt.Errorf("path %q is mounted on %q but it is not a shared mount", path, sourceMount)
+}
+
+// Ensure mount point on which path is mounted, is either shared or slave.
+func ensureSharedOrSlave(path string, mountInfos []*dockermounts.Info) error {
+	sourceMount, optionalOpts, err := getSourceMount(path, mountInfos)
+	if err != nil {
+		return err
+	}
+	// Make sure source mount point is shared.
+	optsSplit := strings.Split(optionalOpts, " ")
+	for _, opt := range optsSplit {
+		if strings.HasPrefix(opt, "shared:") {
+			return nil
+		} else if strings.HasPrefix(opt, "master:") {
+			return nil
+		}
+	}
+	return fmt.Errorf("path %q is mounted on %q but it is not a shared or slave mount", path, sourceMount)
+}
+
+func getMountInfo(mountInfos []*dockermounts.Info, dir string) *dockermounts.Info {
+	for _, m := range mountInfos {
+		if m.Mountpoint == dir {
+			return m
+		}
+	}
+	return nil
+}
+
+func getSourceMount(source string, mountInfos []*dockermounts.Info) (string, string, error) {
+	mountinfo := getMountInfo(mountInfos, source)
+	if mountinfo != nil {
+		return source, mountinfo.Optional, nil
+	}
+
+	path := source
+	for {
+		path = filepath.Dir(path)
+		mountinfo = getMountInfo(mountInfos, path)
+		if mountinfo != nil {
+			return path, mountinfo.Optional, nil
+		}
+
+		if path == "/" {
+			break
+		}
+	}
+
+	// If we are here, we did not find parent mount. Something is wrong.
+	return "", "", fmt.Errorf("Could not find source mount of %s", source)
+}
+
 func addImageVolumes(rootfs string, s *Server, containerInfo *storage.ContainerInfo, specgen *generate.Generator, mountLabel string) ([]rspec.Mount, error) {
 	mounts := []rspec.Mount{}
 	for dest := range containerInfo.Config.Config.Volumes {
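For context, a self-contained sketch of the data these helpers inspect: docker/pkg/mount exposes the optional-fields column of /proc/self/mountinfo as Info.Optional, and ensureShared/ensureSharedOrSlave only look for the "shared:" and "master:" prefixes. The example values below are assumed, not taken from a real host.

	package main

	import (
		"fmt"
		"strings"
	)

	func main() {
		// Sketch only: how the optional-fields check behaves for typical entries.
		for _, optional := range []string{"shared:1", "master:2", ""} {
			ok := false
			for _, opt := range strings.Split(optional, " ") {
				if strings.HasPrefix(opt, "shared:") || strings.HasPrefix(opt, "master:") {
					ok = true // a shared or slave source mount allows the requested propagation
				}
			}
			fmt.Printf("%q -> shared-or-slave: %v\n", optional, ok)
		}
	}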
@@ -770,6 +870,7 @@ func (s *Server) createSandboxContainer(ctx context.Context, containerID string,
 	}
 	specgen.SetProcessSelinuxLabel(processLabel)
 	specgen.SetLinuxMountLabel(mountLabel)
+	specgen.SetProcessNoNewPrivileges(linux.GetSecurityContext().GetNoNewPrivs())

 	if containerConfig.GetLinux().GetSecurityContext() != nil &&
 		!containerConfig.GetLinux().GetSecurityContext().Privileged {
|
||||||
}
|
}
|
||||||
specgen.AddAnnotation(annotations.Annotations, string(kubeAnnotationsJSON))
|
specgen.AddAnnotation(annotations.Annotations, string(kubeAnnotationsJSON))
|
||||||
|
|
||||||
metaname := metadata.Name
|
spp := containerConfig.GetLinux().GetSecurityContext().GetSeccompProfilePath()
|
||||||
if !privileged {
|
if !privileged {
|
||||||
if err = s.setupSeccomp(&specgen, metaname, sb.Annotations()); err != nil {
|
if err = s.setupSeccomp(&specgen, spp); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
specgen.AddAnnotation(annotations.SeccompProfilePath, spp)
|
||||||
|
// TODO(runcom): add spp to container...
|
||||||
|
|
||||||
|
metaname := metadata.Name
|
||||||
attempt := metadata.Attempt
|
attempt := metadata.Attempt
|
||||||
containerInfo, err := s.StorageRuntimeServer().CreateContainer(s.ImageContext(),
|
containerInfo, err := s.StorageRuntimeServer().CreateContainer(s.ImageContext(),
|
||||||
sb.Name(), sb.ID(),
|
sb.Name(), sb.ID(),
|
||||||
|
@@ -1100,14 +1204,11 @@ func (s *Server) createSandboxContainer(ctx context.Context, containerID string,
 	return container, nil
 }

-func (s *Server) setupSeccomp(specgen *generate.Generator, cname string, sbAnnotations map[string]string) error {
-	profile, ok := sbAnnotations["container.seccomp.security.alpha.kubernetes.io/"+cname]
-	if !ok {
-		profile, ok = sbAnnotations["seccomp.security.alpha.kubernetes.io/pod"]
-		if !ok {
-			// running w/o seccomp, aka unconfined
-			profile = seccompUnconfined
-		}
+func (s *Server) setupSeccomp(specgen *generate.Generator, profile string) error {
+	if profile == "" {
+		// running w/o seccomp, aka unconfined
+		specgen.Spec().Linux.Seccomp = nil
+		return nil
 	}
 	if !s.seccompEnabled {
 		if profile != seccompUnconfined {
@@ -1126,8 +1227,12 @@ func (s *Server) setupSeccomp(specgen *generate.Generator, cname string, sbAnnot
 	if !strings.HasPrefix(profile, seccompLocalhostPrefix) {
 		return fmt.Errorf("unknown seccomp profile option: %q", profile)
 	}
-	// FIXME: https://github.com/kubernetes/kubernetes/issues/39128
-	return nil
+	fname := strings.TrimPrefix(profile, "localhost/")
+	file, err := ioutil.ReadFile(filepath.FromSlash(fname))
+	if err != nil {
+		return fmt.Errorf("cannot load seccomp profile %q: %v", fname, err)
+	}
+	return seccomp.LoadProfileFromBytes(file, specgen)
 }

 // getAppArmorProfileName gets the profile name for the given container.
@@ -13,8 +13,8 @@ import (
 	"k8s.io/client-go/tools/remotecommand"
 	pb "k8s.io/kubernetes/pkg/kubelet/apis/cri/v1alpha1/runtime"
 	kubecontainer "k8s.io/kubernetes/pkg/kubelet/container"
-	utilexec "k8s.io/kubernetes/pkg/util/exec"
 	"k8s.io/kubernetes/pkg/util/term"
+	utilexec "k8s.io/utils/exec"
 )

 // Exec prepares a streaming endpoint to execute a command in the container.
server/container_update_resources.go (new file, 38 lines)
@@ -0,0 +1,38 @@
+package server
+
+import (
+	"github.com/gogo/protobuf/proto"
+	rspec "github.com/opencontainers/runtime-spec/specs-go"
+	"golang.org/x/net/context"
+	pb "k8s.io/kubernetes/pkg/kubelet/apis/cri/v1alpha1/runtime"
+)
+
+// UpdateContainerResources updates ContainerConfig of the container.
+func (s *Server) UpdateContainerResources(ctx context.Context, req *pb.UpdateContainerResourcesRequest) (*pb.UpdateContainerResourcesResponse, error) {
+	c, err := s.GetContainerFromRequest(req.GetContainerId())
+	if err != nil {
+		return nil, err
+	}
+	resources := toOCIResources(req.GetLinux())
+	if err := s.Runtime().UpdateContainer(c, resources); err != nil {
+		return nil, err
+	}
+	return &pb.UpdateContainerResourcesResponse{}, nil
+}
+
+// toOCIResources converts CRI resource constraints to OCI.
+func toOCIResources(r *pb.LinuxContainerResources) *rspec.LinuxResources {
+	return &rspec.LinuxResources{
+		CPU: &rspec.LinuxCPU{
+			Shares: proto.Uint64(uint64(r.GetCpuShares())),
+			Quota:  proto.Int64(r.GetCpuQuota()),
+			Period: proto.Uint64(uint64(r.GetCpuPeriod())),
+			Cpus:   r.GetCpusetCpus(),
+			Mems:   r.GetCpusetMems(),
+		},
+		Memory: &rspec.LinuxMemory{
+			Limit: proto.Int64(r.GetMemoryLimitInBytes()),
+		},
+		// TODO(runcom): OOMScoreAdj is missing
+	}
+}
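For context, a sketch of the CRI request this new handler receives. The field names follow the generated Go types for the v1alpha1 runtime proto (assumed here); the container ID and the numbers are illustrative only and mirror the "ctr update resources" test added later in this commit.

	// Sketch only: memory 500 MiB, cpu-shares 256, cpu-period 20000, cpu-quota 10000.
	req := &pb.UpdateContainerResourcesRequest{
		ContainerId: "<container id>",
		Linux: &pb.LinuxContainerResources{
			CpuShares:          256,
			CpuPeriod:          20000,
			CpuQuota:           10000,
			MemoryLimitInBytes: 524288000,
		},
	}
	_ = toOCIResources(req.GetLinux()) // same numbers, now as *rspec.LinuxResources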
@@ -23,7 +23,7 @@ import (
 	"github.com/sirupsen/logrus"
 	"golang.org/x/net/context"
 	"golang.org/x/sys/unix"
-	"k8s.io/kubernetes/pkg/api/v1"
+	"k8s.io/api/core/v1"
 	pb "k8s.io/kubernetes/pkg/kubelet/apis/cri/v1alpha1/runtime"
 	"k8s.io/kubernetes/pkg/kubelet/leaky"
 	"k8s.io/kubernetes/pkg/kubelet/network/hostport"
@@ -398,15 +398,8 @@ func (s *Server) RunPodSandbox(ctx context.Context, req *pb.RunPodSandboxRequest
 	}

 	// extract linux sysctls from annotations and pass down to oci runtime
-	safe, unsafe, err := SysctlsFromPodAnnotations(kubeAnnotations)
-	if err != nil {
-		return nil, err
-	}
-	for _, sysctl := range safe {
-		g.AddLinuxSysctl(sysctl.Name, sysctl.Value)
-	}
-	for _, sysctl := range unsafe {
-		g.AddLinuxSysctl(sysctl.Name, sysctl.Value)
+	for key, value := range req.GetConfig().GetLinux().GetSysctls() {
+		g.AddLinuxSysctl(key, value)
 	}

 	// Set OOM score adjust of the infra container to be very low
@@ -504,6 +497,15 @@ func (s *Server) RunPodSandbox(ctx context.Context, req *pb.RunPodSandboxRequest
 	g.AddAnnotation(annotations.IP, ip)
 	sb.AddIP(ip)

+	spp := req.GetConfig().GetLinux().GetSecurityContext().GetSeccompProfilePath()
+	g.AddAnnotation(annotations.SeccompProfilePath, spp)
+	sb.SetSeccompProfilePath(spp)
+	if !privileged {
+		if err = s.setupSeccomp(&g, spp); err != nil {
+			return nil, err
+		}
+	}
+
 	err = g.SaveToFile(filepath.Join(podContainer.Dir, "config.json"), saveOptions)
 	if err != nil {
 		return nil, fmt.Errorf("failed to save template configuration for pod sandbox %s(%s): %v", sb.Name(), id, err)
@@ -30,8 +30,8 @@ import (
 	"k8s.io/kubernetes/pkg/kubelet/server/streaming"
 	iptablesproxy "k8s.io/kubernetes/pkg/proxy/iptables"
 	utildbus "k8s.io/kubernetes/pkg/util/dbus"
-	utilexec "k8s.io/kubernetes/pkg/util/exec"
 	utiliptables "k8s.io/kubernetes/pkg/util/iptables"
+	utilexec "k8s.io/utils/exec"
 )

 const (
@@ -200,7 +200,7 @@ func New(config *Config) (*Server, error) {
 	}
 	iptInterface := utiliptables.New(utilexec.New(), utildbus.New(), utiliptables.ProtocolIpv4)
 	iptInterface.EnsureChain(utiliptables.TableNAT, iptablesproxy.KubeMarkMasqChain)
-	hostportManager := hostport.NewHostportManager()
+	hostportManager := hostport.NewHostportManager(iptInterface)

 	s := &Server{
 		ContainerServer: containerServer,
@@ -871,3 +871,60 @@ function teardown() {
 	cleanup_pods
 	stop_crio
 }
+
+@test "ctr update resources" {
+	start_crio
+	run crioctl pod run --config "$TESTDATA"/sandbox_config.json
+	echo "$output"
+	[ "$status" -eq 0 ]
+	pod_id="$output"
+	run crioctl ctr create --config "$TESTDATA"/container_redis.json --pod "$pod_id"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	ctr_id="$output"
+	run crioctl ctr start --id "$ctr_id"
+	echo "$output"
+	[ "$status" -eq 0 ]
+
+	run crioctl ctr execsync --id "$ctr_id" sh -c "cat /sys/fs/cgroup/memory/memory.limit_in_bytes"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	[[ "$output" =~ "209715200" ]]
+	run crioctl ctr execsync --id "$ctr_id" sh -c "cat /sys/fs/cgroup/cpu/cpu.shares"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	[[ "$output" =~ "512" ]]
+	run crioctl ctr execsync --id "$ctr_id" sh -c "cat /sys/fs/cgroup/cpu/cpu.cfs_period_us"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	[[ "$output" =~ "10000" ]]
+	run crioctl ctr execsync --id "$ctr_id" sh -c "cat /sys/fs/cgroup/cpu/cpu.cfs_quota_us"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	[[ "$output" =~ "20000" ]]
+
+	run crictl update --memory 524288000 --cpu-period 20000 --cpu-quota 10000 --cpu-share 256 "$ctr_id"
+	echo "$output"
+	[ "$status" -eq 0 ]
+
+	run crioctl ctr execsync --id "$ctr_id" sh -c "cat /sys/fs/cgroup/memory/memory.limit_in_bytes"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	[[ "$output" =~ "524288000" ]]
+	run crioctl ctr execsync --id "$ctr_id" sh -c "cat /sys/fs/cgroup/cpu/cpu.shares"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	[[ "$output" =~ "256" ]]
+	run crioctl ctr execsync --id "$ctr_id" sh -c "cat /sys/fs/cgroup/cpu/cpu.cfs_period_us"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	[[ "$output" =~ "20000" ]]
+	run crioctl ctr execsync --id "$ctr_id" sh -c "cat /sys/fs/cgroup/cpu/cpu.cfs_quota_us"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	[[ "$output" =~ "10000" ]]
+
+	cleanup_ctrs
+	cleanup_pods
+	stop_crio
+}
test/ctr_seccomp.bats (new file, 221 lines)
@@ -0,0 +1,221 @@
+#!/usr/bin/env bats
+
+load helpers
+
+function teardown() {
+	cleanup_test
+}
+
+# 1. test running with ctr unconfined
+# test that we can run with a syscall which would be otherwise blocked
+@test "ctr seccomp profiles unconfined" {
+	# this test requires seccomp, so skip this test if seccomp is not enabled.
+	enabled=$(is_seccomp_enabled)
+	if [[ "$enabled" -eq 0 ]]; then
+		skip "skip this test since seccomp is not enabled."
+	fi
+
+	sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
+	sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
+	sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
+
+	start_crio "$TESTDIR"/seccomp_profile1.json
+
+	sed -e 's/%VALUE%/unconfined/g' "$TESTDATA"/container_config_seccomp.json > "$TESTDIR"/seccomp1.json
+	run crioctl pod run --name seccomp1 --config "$TESTDATA"/sandbox_config.json
+	echo "$output"
+	[ "$status" -eq 0 ]
+	pod_id="$output"
+	run crioctl ctr create --name testname --config "$TESTDIR"/seccomp1.json --pod "$pod_id"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	ctr_id="$output"
+	run crioctl ctr start --id "$ctr_id"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	run crioctl ctr execsync --id "$ctr_id" chmod 777 .
+	echo "$output"
+	[ "$status" -eq 0 ]
+
+	cleanup_ctrs
+	cleanup_pods
+	stop_crio
+}
+
+# 2. test running with ctr runtime/default
+# test that we cannot run with a syscall blocked by the default seccomp profile
+@test "ctr seccomp profiles runtime/default" {
+	# this test requires seccomp, so skip this test if seccomp is not enabled.
+	enabled=$(is_seccomp_enabled)
+	if [[ "$enabled" -eq 0 ]]; then
+		skip "skip this test since seccomp is not enabled."
+	fi
+
+	sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
+	sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
+	sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
+
+	start_crio "$TESTDIR"/seccomp_profile1.json
+
+	sed -e 's/%VALUE%/runtime\/default/g' "$TESTDATA"/container_config_seccomp.json > "$TESTDIR"/seccomp2.json
+	run crioctl pod run --name seccomp2 --config "$TESTDATA"/sandbox_config.json
+	echo "$output"
+	[ "$status" -eq 0 ]
+	pod_id="$output"
+	run crioctl ctr create --name testname2 --config "$TESTDIR"/seccomp2.json --pod "$pod_id"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	ctr_id="$output"
+	run crioctl ctr start --id "$ctr_id"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	run crioctl ctr execsync --id "$ctr_id" chmod 777 .
+	echo "$output"
+	[ "$status" -eq 0 ]
+	[[ "$output" =~ "Exit code: 1" ]]
+	[[ "$output" =~ "Operation not permitted" ]]
+
+	cleanup_ctrs
+	cleanup_pods
+	stop_crio
+}
+
+# 3. test running with ctr unconfined and profile empty
+# test that we can run with a syscall which would be otherwise blocked
+@test "ctr seccomp profiles unconfined by empty field" {
+	# this test requires seccomp, so skip this test if seccomp is not enabled.
+	enabled=$(is_seccomp_enabled)
+	if [[ "$enabled" -eq 0 ]]; then
+		skip "skip this test since seccomp is not enabled."
+	fi
+
+	sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
+	sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
+	sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
+
+	start_crio "$TESTDIR"/seccomp_profile1.json
+
+	sed -e 's/%VALUE%//g' "$TESTDATA"/container_config_seccomp.json > "$TESTDIR"/seccomp1.json
+	run crioctl pod run --name seccomp1 --config "$TESTDATA"/sandbox_config.json
+	echo "$output"
+	[ "$status" -eq 0 ]
+	pod_id="$output"
+	run crioctl ctr create --name testname --config "$TESTDIR"/seccomp1.json --pod "$pod_id"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	ctr_id="$output"
+	run crioctl ctr start --id "$ctr_id"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	run crioctl ctr execsync --id "$ctr_id" chmod 777 .
+	echo "$output"
+	[ "$status" -eq 0 ]
+
+	cleanup_ctrs
+	cleanup_pods
+	stop_crio
+}
+
+# 4. test running with ctr wrong profile name
+@test "ctr seccomp profiles wrong profile name" {
+	# this test requires seccomp, so skip this test if seccomp is not enabled.
+	enabled=$(is_seccomp_enabled)
+	if [[ "$enabled" -eq 0 ]]; then
+		skip "skip this test since seccomp is not enabled."
+	fi
+
+	sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
+	sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
+	sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
+
+	start_crio "$TESTDIR"/seccomp_profile1.json
+
+	sed -e 's/%VALUE%/wontwork/g' "$TESTDATA"/container_config_seccomp.json > "$TESTDIR"/seccomp1.json
+	run crioctl pod run --name seccomp1 --config "$TESTDATA"/sandbox_config.json
+	echo "$output"
+	[ "$status" -eq 0 ]
+	pod_id="$output"
+	run crioctl ctr create --name testname --config "$TESTDIR"/seccomp1.json --pod "$pod_id"
+	echo "$output"
+	[[ "$status" -ne 0 ]]
+	[[ "$output" =~ "unknown seccomp profile option:" ]]
+	[[ "$output" =~ "wontwork" ]]
+
+	cleanup_ctrs
+	cleanup_pods
+	stop_crio
+}
+
+# 5. test running with ctr localhost/profile_name
+@test "ctr seccomp profiles localhost/profile_name" {
+	# this test requires seccomp, so skip this test if seccomp is not enabled.
+	enabled=$(is_seccomp_enabled)
+	if [[ "$enabled" -eq 0 ]]; then
+		skip "skip this test since seccomp is not enabled."
+	fi
+
+	start_crio
+
+	sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
+	sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
+	sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
+
+	sed -e 's@%VALUE%@localhost/'"$TESTDIR"'/seccomp_profile1.json@g' "$TESTDATA"/container_config_seccomp.json > "$TESTDIR"/seccomp1.json
+	run crioctl pod run --name seccomp1 --config "$TESTDATA"/sandbox_config.json
+	echo "$output"
+	[ "$status" -eq 0 ]
+	pod_id="$output"
+	run crioctl ctr create --name testname --config "$TESTDIR"/seccomp1.json --pod "$pod_id"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	ctr_id="$output"
+	run crioctl ctr start --id "$ctr_id"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	run crioctl ctr execsync --id "$ctr_id" chmod 777 .
+	[ "$status" -eq 0 ]
+	[[ "$output" =~ "Exit code: 1" ]]
+	[[ "$output" =~ "Operation not permitted" ]]
+
+	cleanup_ctrs
+	cleanup_pods
+	stop_crio
+}
+
+# 6. test running with ctr docker/default
+# test that we cannot run with a syscall blocked by the default seccomp profile
+@test "ctr seccomp profiles runtime/default" {
+	# this test requires seccomp, so skip this test if seccomp is not enabled.
+	enabled=$(is_seccomp_enabled)
+	if [[ "$enabled" -eq 0 ]]; then
+		skip "skip this test since seccomp is not enabled."
+	fi
+
+	sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
+	sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
+	sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
+
+	start_crio "$TESTDIR"/seccomp_profile1.json
+
+	sed -e 's/%VALUE%/docker\/default/g' "$TESTDATA"/container_config_seccomp.json > "$TESTDIR"/seccomp2.json
+	run crioctl pod run --name seccomp2 --config "$TESTDATA"/sandbox_config.json
+	echo "$output"
+	[ "$status" -eq 0 ]
+	pod_id="$output"
+	run crioctl ctr create --name testname2 --config "$TESTDIR"/seccomp2.json --pod "$pod_id"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	ctr_id="$output"
+	run crioctl ctr start --id "$ctr_id"
+	echo "$output"
+	[ "$status" -eq 0 ]
+	run crioctl ctr execsync --id "$ctr_id" chmod 777 .
+	echo "$output"
+	[ "$status" -eq 0 ]
+	[[ "$output" =~ "Exit code: 1" ]]
+	[[ "$output" =~ "Operation not permitted" ]]
+
+	cleanup_ctrs
+	cleanup_pods
+	stop_crio
+}
@@ -210,7 +210,7 @@ function teardown() {

 @test "pass pod sysctls to runtime" {
 	start_crio
-	run crioctl pod run --config "$TESTDATA"/sandbox_config.json
+	run crioctl pod run --config "$TESTDATA"/sandbox_config_sysctl.json
 	echo "$output"
 	[ "$status" -eq 0 ]
 	pod_id="$output"
@ -1,368 +0,0 @@
|
||||||
#!/usr/bin/env bats
|
|
||||||
|
|
||||||
load helpers
|
|
||||||
|
|
||||||
function teardown() {
|
|
||||||
cleanup_test
|
|
||||||
}
|
|
||||||
|
|
||||||
# 1. test running with ctr unconfined
|
|
||||||
# test that we can run with a syscall which would be otherwise blocked
|
|
||||||
@test "ctr seccomp profiles unconfined" {
|
|
||||||
# this test requires seccomp, so skip this test if seccomp is not enabled.
|
|
||||||
enabled=$(is_seccomp_enabled)
|
|
||||||
if [[ "$enabled" -eq 0 ]]; then
|
|
||||||
skip "skip this test since seccomp is not enabled."
|
|
||||||
fi
|
|
||||||
|
|
||||||
sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
start_crio "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
sed -e 's/%VALUE%/,"container\.seccomp\.security\.alpha\.kubernetes\.io\/testname": "unconfined"/g' "$TESTDATA"/sandbox_config_seccomp.json > "$TESTDIR"/seccomp1.json
|
|
||||||
run crioctl pod run --name seccomp1 --config "$TESTDIR"/seccomp1.json
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
pod_id="$output"
|
|
||||||
run crioctl ctr create --name testname --config "$TESTDATA"/container_redis.json --pod "$pod_id"
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
ctr_id="$output"
|
|
||||||
run crioctl ctr start --id "$ctr_id"
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
run crioctl ctr execsync --id "$ctr_id" chmod 777 .
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
|
|
||||||
cleanup_ctrs
|
|
||||||
cleanup_pods
|
|
||||||
stop_crio
|
|
||||||
}
|
|
||||||
|
|
||||||
# 2. test running with ctr runtime/default
|
|
||||||
# test that we cannot run with a syscall blocked by the default seccomp profile
|
|
||||||
@test "ctr seccomp profiles runtime/default" {
|
|
||||||
# this test requires seccomp, so skip this test if seccomp is not enabled.
|
|
||||||
enabled=$(is_seccomp_enabled)
|
|
||||||
if [[ "$enabled" -eq 0 ]]; then
|
|
||||||
skip "skip this test since seccomp is not enabled."
|
|
||||||
fi
|
|
||||||
|
|
||||||
sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
start_crio "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
sed -e 's/%VALUE%/,"container\.seccomp\.security\.alpha\.kubernetes\.io\/testname2": "runtime\/default"/g' "$TESTDATA"/sandbox_config_seccomp.json > "$TESTDIR"/seccomp2.json
|
|
||||||
run crioctl pod run --name seccomp2 --config "$TESTDIR"/seccomp2.json
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
pod_id="$output"
|
|
||||||
run crioctl ctr create --name testname2 --config "$TESTDATA"/container_redis.json --pod "$pod_id"
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
ctr_id="$output"
|
|
||||||
run crioctl ctr start --id "$ctr_id"
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
run crioctl ctr execsync --id "$ctr_id" chmod 777 .
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
[[ "$output" =~ "Exit code: 1" ]]
|
|
||||||
[[ "$output" =~ "Operation not permitted" ]]
|
|
||||||
|
|
||||||
cleanup_ctrs
|
|
||||||
cleanup_pods
|
|
||||||
stop_crio
|
|
||||||
}
|
|
||||||
|
|
||||||
# 3. test running with ctr wrong profile name
|
|
||||||
@test "ctr seccomp profiles wrong profile name" {
|
|
||||||
# this test requires seccomp, so skip this test if seccomp is not enabled.
|
|
||||||
enabled=$(is_seccomp_enabled)
|
|
||||||
if [[ "$enabled" -eq 0 ]]; then
|
|
||||||
skip "skip this test since seccomp is not enabled."
|
|
||||||
fi
|
|
||||||
|
|
||||||
sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
start_crio "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
sed -e 's/%VALUE%/,"container\.seccomp\.security\.alpha\.kubernetes\.io\/testname3": "notgood"/g' "$TESTDATA"/sandbox_config_seccomp.json > "$TESTDIR"/seccomp3.json
|
|
||||||
run crioctl pod run --name seccomp3 --config "$TESTDIR"/seccomp3.json
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
pod_id="$output"
|
|
||||||
run crioctl ctr create --name testname3 --config "$TESTDATA"/container_config.json --pod "$pod_id"
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -ne 0 ]
|
|
||||||
[[ "$output" =~ "unknown seccomp profile option:" ]]
|
|
||||||
[[ "$output" =~ "notgood" ]]
|
|
||||||
|
|
||||||
cleanup_ctrs
|
|
||||||
cleanup_pods
|
|
||||||
stop_crio
|
|
||||||
}
|
|
||||||
|
|
||||||
# TODO(runcom): need https://issues.k8s.io/36997
|
|
||||||
# 4. test running with ctr localhost/profile_name
|
|
||||||
@test "ctr seccomp profiles localhost/profile_name" {
|
|
||||||
# this test requires seccomp, so skip this test if seccomp is not enabled.
|
|
||||||
enabled=$(is_seccomp_enabled)
|
|
||||||
if [[ "$enabled" -eq 0 ]]; then
|
|
||||||
skip "skip this test since seccomp is not enabled."
|
|
||||||
fi
|
|
||||||
|
|
||||||
#sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
|
|
||||||
#sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
#sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
#start_crio "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
skip "need https://issues.k8s.io/36997"
|
|
||||||
}
|
|
||||||
|
|
||||||
# 5. test running with unkwown ctr profile falls back to pod profile
|
|
||||||
# unknown ctr -> unconfined
|
|
||||||
# pod -> runtime/default
|
|
||||||
# result: fail chmod
|
|
||||||
@test "ctr seccomp profiles falls back to pod profile" {
|
|
||||||
# this test requires seccomp, so skip this test if seccomp is not enabled.
|
|
||||||
enabled=$(is_seccomp_enabled)
|
|
||||||
if [[ "$enabled" -eq 0 ]]; then
|
|
||||||
skip "skip this test since seccomp is not enabled."
|
|
||||||
fi
|
|
||||||
|
|
||||||
sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
start_crio "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
sed -e 's/%VALUE%/,"container\.seccomp\.security\.alpha\.kubernetes\.io\/redhat\.test\.crio-seccomp2-1-testname2-0-not-exists": "unconfined", "seccomp\.security\.alpha\.kubernetes\.io\/pod": "runtime\/default"/g' "$TESTDATA"/sandbox_config_seccomp.json > "$TESTDIR"/seccomp5.json
|
|
||||||
run crioctl pod run --name seccomp5 --config "$TESTDIR"/seccomp5.json
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
pod_id="$output"
|
|
||||||
run crioctl ctr create --config "$TESTDATA"/container_redis.json --pod "$pod_id"
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
ctr_id="$output"
|
|
||||||
run crioctl ctr start --id "$ctr_id"
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
run crioctl ctr execsync --id "$ctr_id" chmod 777 .
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
[[ "$output" =~ "Exit code: 1" ]]
|
|
||||||
[[ "$output" =~ "Operation not permitted" ]]
|
|
||||||
|
|
||||||
cleanup_ctrs
|
|
||||||
cleanup_pods
|
|
||||||
stop_crio
|
|
||||||
}
|
|
||||||
|
|
||||||
# 6. test running with unkwown ctr profile and no pod, falls back to unconfined
|
|
||||||
# unknown ctr -> runtime/default
|
|
||||||
# pod -> NO
|
|
||||||
# result: success, running unconfined
|
|
||||||
@test "ctr seccomp profiles falls back to unconfined" {
|
|
||||||
# this test requires seccomp, so skip this test if seccomp is not enabled.
|
|
||||||
enabled=$(is_seccomp_enabled)
|
|
||||||
if [[ "$enabled" -eq 0 ]]; then
|
|
||||||
skip "skip this test since seccomp is not enabled."
|
|
||||||
fi
|
|
||||||
|
|
||||||
sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
start_crio "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
sed -e 's/%VALUE%/,"container\.seccomp\.security\.alpha\.kubernetes\.io\/redhat\.test\.crio-seccomp6-1-testname6-0-not-exists": "runtime-default"/g' "$TESTDATA"/sandbox_config_seccomp.json > "$TESTDIR"/seccomp6.json
|
|
||||||
run crioctl pod run --name seccomp6 --config "$TESTDIR"/seccomp6.json
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
pod_id="$output"
|
|
||||||
run crioctl ctr create --name testname6 --config "$TESTDATA"/container_redis.json --pod "$pod_id"
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
ctr_id="$output"
|
|
||||||
run crioctl ctr start --id "$ctr_id"
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
run crioctl ctr execsync --id "$ctr_id" chmod 777 .
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
|
|
||||||
cleanup_ctrs
|
|
||||||
cleanup_pods
|
|
||||||
stop_crio
|
|
||||||
}
|
|
||||||
|
|
||||||
# 1. test running with pod unconfined
|
|
||||||
# test that we can run with a syscall which would be otherwise blocked
|
|
||||||
@test "pod seccomp profiles unconfined" {
|
|
||||||
# this test requires seccomp, so skip this test if seccomp is not enabled.
|
|
||||||
enabled=$(is_seccomp_enabled)
|
|
||||||
if [[ "$enabled" -eq 0 ]]; then
|
|
||||||
skip "skip this test since seccomp is not enabled."
|
|
||||||
fi
|
|
||||||
|
|
||||||
sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
start_crio "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
sed -e 's/%VALUE%/,"seccomp\.security\.alpha\.kubernetes\.io\/pod": "unconfined"/g' "$TESTDATA"/sandbox_config_seccomp.json > "$TESTDIR"/seccomp1.json
|
|
||||||
run crioctl pod run --name seccomp1 --config "$TESTDIR"/seccomp1.json
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
pod_id="$output"
|
|
||||||
run crioctl ctr create --config "$TESTDATA"/container_redis.json --pod "$pod_id"
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
ctr_id="$output"
|
|
||||||
run crioctl ctr start --id "$ctr_id"
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
run crioctl ctr execsync --id "$ctr_id" chmod 777 .
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
|
|
||||||
cleanup_ctrs
|
|
||||||
cleanup_pods
|
|
||||||
stop_crio
|
|
||||||
}
|
|
||||||
|
|
||||||
# 2. test running with pod runtime/default
|
|
||||||
# test that we cannot run with a syscall blocked by the default seccomp profile
|
|
||||||
@test "pod seccomp profiles runtime/default" {
|
|
||||||
# this test requires seccomp, so skip this test if seccomp is not enabled.
|
|
||||||
enabled=$(is_seccomp_enabled)
|
|
||||||
if [[ "$enabled" -eq 0 ]]; then
|
|
||||||
skip "skip this test since seccomp is not enabled."
|
|
||||||
fi
|
|
||||||
|
|
||||||
sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
start_crio "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
sed -e 's/%VALUE%/,"seccomp\.security\.alpha\.kubernetes\.io\/pod": "runtime\/default"/g' "$TESTDATA"/sandbox_config_seccomp.json > "$TESTDIR"/seccomp2.json
|
|
||||||
run crioctl pod run --name seccomp2 --config "$TESTDIR"/seccomp2.json
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
pod_id="$output"
|
|
||||||
run crioctl ctr create --config "$TESTDATA"/container_redis.json --pod "$pod_id"
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
ctr_id="$output"
|
|
||||||
run crioctl ctr start --id "$ctr_id"
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
run crioctl ctr execsync --id "$ctr_id" chmod 777 .
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
[[ "$output" =~ "Exit code: 1" ]]
|
|
||||||
[[ "$output" =~ "Operation not permitted" ]]
|
|
||||||
|
|
||||||
cleanup_ctrs
|
|
||||||
cleanup_pods
|
|
||||||
stop_crio
|
|
||||||
}
|
|
||||||
|
|
||||||
# 3. test running with pod wrong profile name
|
|
||||||
@test "pod seccomp profiles wrong profile name" {
|
|
||||||
# this test requires seccomp, so skip this test if seccomp is not enabled.
|
|
||||||
enabled=$(is_seccomp_enabled)
|
|
||||||
if [[ "$enabled" -eq 0 ]]; then
|
|
||||||
skip "skip this test since seccomp is not enabled."
|
|
||||||
fi
|
|
||||||
|
|
||||||
sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
start_crio "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
# 3. test running with pod wrong profile name
|
|
||||||
sed -e 's/%VALUE%/,"seccomp\.security\.alpha\.kubernetes\.io\/pod": "notgood"/g' "$TESTDATA"/sandbox_config_seccomp.json > "$TESTDIR"/seccomp3.json
|
|
||||||
run crioctl pod run --name seccomp3 --config "$TESTDIR"/seccomp3.json
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
pod_id="$output"
|
|
||||||
run crioctl ctr create --config "$TESTDATA"/container_config.json --pod "$pod_id"
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -ne 0 ]
|
|
||||||
[[ "$output" =~ "unknown seccomp profile option:" ]]
|
|
||||||
[[ "$output" =~ "notgood" ]]
|
|
||||||
|
|
||||||
cleanup_ctrs
|
|
||||||
cleanup_pods
|
|
||||||
stop_crio
|
|
||||||
}
|
|
||||||
|
|
||||||
# TODO(runcom): need https://issues.k8s.io/36997
|
|
||||||
# 4. test running with pod localhost/profile_name
|
|
||||||
@test "pod seccomp profiles localhost/profile_name" {
|
|
||||||
# this test requires seccomp, so skip this test if seccomp is not enabled.
|
|
||||||
enabled=$(is_seccomp_enabled)
|
|
||||||
if [[ "$enabled" -eq 0 ]]; then
|
|
||||||
skip "skip this test since seccomp is not enabled."
|
|
||||||
fi
|
|
||||||
|
|
||||||
#sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
|
|
||||||
#sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
#sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
#start_crio "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
skip "need https://issues.k8s.io/36997"
|
|
||||||
}
|
|
||||||
|
|
||||||
# test running with ctr docker/default
|
|
||||||
# test that we cannot run with a syscall blocked by the default seccomp profile
|
|
||||||
@test "ctr seccomp profiles docker/default" {
|
|
||||||
# this test requires seccomp, so skip this test if seccomp is not enabled.
|
|
||||||
enabled=$(is_seccomp_enabled)
|
|
||||||
if [[ "$enabled" -eq 0 ]]; then
|
|
||||||
skip "skip this test since seccomp is not enabled."
|
|
||||||
fi
|
|
||||||
|
|
||||||
sed -e 's/"chmod",//' "$CRIO_ROOT"/cri-o/seccomp.json > "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmod",//' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
sed -i 's/"fchmodat",//g' "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
start_crio "$TESTDIR"/seccomp_profile1.json
|
|
||||||
|
|
||||||
sed -e 's/%VALUE%/,"container\.seccomp\.security\.alpha\.kubernetes\.io\/testname2": "docker\/default"/g' "$TESTDATA"/sandbox_config_seccomp.json > "$TESTDIR"/seccomp2.json
|
|
||||||
run crioctl pod run --name seccomp2 --config "$TESTDIR"/seccomp2.json
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
pod_id="$output"
|
|
||||||
run crioctl ctr create --name testname2 --config "$TESTDATA"/container_redis.json --pod "$pod_id"
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
ctr_id="$output"
|
|
||||||
run crioctl ctr start --id "$ctr_id"
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
run crioctl ctr execsync --id "$ctr_id" chmod 777 .
|
|
||||||
echo "$output"
|
|
||||||
[ "$status" -eq 0 ]
|
|
||||||
[[ "$output" =~ "Exit code: 1" ]]
|
|
||||||
[[ "$output" =~ "Operation not permitted" ]]
|
|
||||||
|
|
||||||
cleanup_ctrs
|
|
||||||
cleanup_pods
|
|
||||||
stop_crio
|
|
||||||
}
|
|
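The seccomp tests above all follow the same pattern: start CRI-O with a profile that drops chmod/fchmod/fchmodat, run `chmod 777 .` inside the container via `crioctl ctr execsync`, and expect exit code 1 with "Operation not permitted". As a rough illustration only (not part of this commit), the check reduces to the following Go sketch of the call the profile blocks:

```go
package main

import (
	"fmt"
	"os"
	"syscall"
)

func main() {
	// Run inside a container whose seccomp profile omits chmod/fchmod/fchmodat:
	// the kernel rejects the call, which is what the tests grep for as
	// "Operation not permitted" (EPERM) together with a non-zero exit code.
	if err := syscall.Chmod(".", 0777); err != nil {
		fmt.Fprintln(os.Stderr, "chmod:", err)
		os.Exit(1)
	}
	fmt.Println("chmod succeeded; the seccomp profile did not block it")
}
```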
9 test/testdata/container_config_seccomp.json vendored
|
@ -6,13 +6,11 @@
|
||||||
"image": {
|
"image": {
|
||||||
"image": "redis:alpine"
|
"image": "redis:alpine"
|
||||||
},
|
},
|
||||||
"command": [
|
|
||||||
"/bin/bash"
|
|
||||||
],
|
|
||||||
"args": [
|
"args": [
|
||||||
"/bin/chmod", "777", "."
|
"docker-entrypoint.sh",
|
||||||
|
"redis-server"
|
||||||
],
|
],
|
||||||
"working_dir": "/",
|
"working_dir": "/data",
|
||||||
"envs": [
|
"envs": [
|
||||||
{
|
{
|
||||||
"key": "PATH",
|
"key": "PATH",
|
||||||
|
@ -53,6 +51,7 @@
|
||||||
"oom_score_adj": 30
|
"oom_score_adj": 30
|
||||||
},
|
},
|
||||||
"security_context": {
|
"security_context": {
|
||||||
|
"seccomp_profile_path": "%VALUE%",
|
||||||
"capabilities": {
|
"capabilities": {
|
||||||
"add_capabilities": [
|
"add_capabilities": [
|
||||||
"setuid",
|
"setuid",
|
||||||
|
|
1 test/testdata/container_redis.json vendored
|
@ -45,6 +45,7 @@
|
||||||
"tty": false,
|
"tty": false,
|
||||||
"linux": {
|
"linux": {
|
||||||
"resources": {
|
"resources": {
|
||||||
|
"memory_limit_in_bytes": 209715200,
|
||||||
"cpu_period": 10000,
|
"cpu_period": 10000,
|
||||||
"cpu_quota": 20000,
|
"cpu_quota": 20000,
|
||||||
"cpu_shares": 512,
|
"cpu_shares": 512,
|
||||||
|
|
2 test/testdata/sandbox_config.json vendored
|
@ -28,8 +28,6 @@
|
||||||
},
|
},
|
||||||
"annotations": {
|
"annotations": {
|
||||||
"owner": "hmeng",
|
"owner": "hmeng",
|
||||||
"security.alpha.kubernetes.io/sysctls": "kernel.shm_rmid_forced=1,net.ipv4.ip_local_port_range=1024 65000",
|
|
||||||
"security.alpha.kubernetes.io/unsafe-sysctls": "kernel.msgmax=8192" ,
|
|
||||||
"security.alpha.kubernetes.io/seccomp/pod": "unconfined"
|
"security.alpha.kubernetes.io/seccomp/pod": "unconfined"
|
||||||
},
|
},
|
||||||
"linux": {
|
"linux": {
|
||||||
|
|
1 test/testdata/sandbox_config_hostnet.json vendored
|
@ -32,7 +32,6 @@
|
||||||
},
|
},
|
||||||
"annotations": {
|
"annotations": {
|
||||||
"owner": "hmeng",
|
"owner": "hmeng",
|
||||||
"security.alpha.kubernetes.io/unsafe-sysctls": "kernel.msgmax=8192" ,
|
|
||||||
"security.alpha.kubernetes.io/seccomp/pod": "unconfined"
|
"security.alpha.kubernetes.io/seccomp/pod": "unconfined"
|
||||||
},
|
},
|
||||||
"linux": {
|
"linux": {
|
||||||
|
|
2 test/testdata/sandbox_config_hostport.json vendored
|
@ -38,8 +38,6 @@
|
||||||
},
|
},
|
||||||
"annotations": {
|
"annotations": {
|
||||||
"owner": "hmeng",
|
"owner": "hmeng",
|
||||||
"security.alpha.kubernetes.io/sysctls": "kernel.shm_rmid_forced=1,net.ipv4.ip_local_port_range=1024 65000",
|
|
||||||
"security.alpha.kubernetes.io/unsafe-sysctls": "kernel.msgmax=8192" ,
|
|
||||||
"security.alpha.kubernetes.io/seccomp/pod": "unconfined"
|
"security.alpha.kubernetes.io/seccomp/pod": "unconfined"
|
||||||
},
|
},
|
||||||
"linux": {
|
"linux": {
|
||||||
|
|
2 test/testdata/sandbox_config_seccomp.json vendored
|
@ -32,11 +32,11 @@
|
||||||
},
|
},
|
||||||
"annotations": {
|
"annotations": {
|
||||||
"owner": "hmeng"
|
"owner": "hmeng"
|
||||||
%VALUE%
|
|
||||||
},
|
},
|
||||||
"linux": {
|
"linux": {
|
||||||
"cgroup_parent": "/Burstable/pod_123-456",
|
"cgroup_parent": "/Burstable/pod_123-456",
|
||||||
"security_context": {
|
"security_context": {
|
||||||
|
"seccomp_profile_path": "%VALUE%",
|
||||||
"namespace_options": {
|
"namespace_options": {
|
||||||
"host_network": false,
|
"host_network": false,
|
||||||
"host_pid": false,
|
"host_pid": false,
|
||||||
|
|
2 test/testdata/sandbox_config_selinux.json vendored
|
@ -28,8 +28,6 @@
|
||||||
},
|
},
|
||||||
"annotations": {
|
"annotations": {
|
||||||
"owner": "hmeng",
|
"owner": "hmeng",
|
||||||
"security.alpha.kubernetes.io/sysctls": "kernel.shm_rmid_forced=1,net.ipv4.ip_local_port_range=1024 65000",
|
|
||||||
"security.alpha.kubernetes.io/unsafe-sysctls": "kernel.msgmax=8192" ,
|
|
||||||
"security.alpha.kubernetes.io/seccomp/pod": "unconfined"
|
"security.alpha.kubernetes.io/seccomp/pod": "unconfined"
|
||||||
},
|
},
|
||||||
"linux": {
|
"linux": {
|
||||||
|
|
54 test/testdata/sandbox_config_sysctl.json vendored Normal file
|
@ -0,0 +1,54 @@
|
||||||
|
{
|
||||||
|
"metadata": {
|
||||||
|
"name": "podsandbox1",
|
||||||
|
"uid": "redhat-test-crio",
|
||||||
|
"namespace": "redhat.test.crio",
|
||||||
|
"attempt": 1
|
||||||
|
},
|
||||||
|
"hostname": "crioctl_host",
|
||||||
|
"log_directory": "",
|
||||||
|
"dns_config": {
|
||||||
|
"searches": [
|
||||||
|
"8.8.8.8"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"port_mappings": [],
|
||||||
|
"resources": {
|
||||||
|
"cpu": {
|
||||||
|
"limits": 3,
|
||||||
|
"requests": 2
|
||||||
|
},
|
||||||
|
"memory": {
|
||||||
|
"limits": 50000000,
|
||||||
|
"requests": 2000000
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"labels": {
|
||||||
|
"group": "test"
|
||||||
|
},
|
||||||
|
"annotations": {
|
||||||
|
"owner": "hmeng",
|
||||||
|
"security.alpha.kubernetes.io/seccomp/pod": "unconfined"
|
||||||
|
},
|
||||||
|
"linux": {
|
||||||
|
"sysctls": {
|
||||||
|
"kernel.shm_rmid_forced": "1",
|
||||||
|
"net.ipv4.ip_local_port_range": "1024 65000",
|
||||||
|
"kernel.msgmax": "8192"
|
||||||
|
},
|
||||||
|
"cgroup_parent": "/Burstable/pod_123-456",
|
||||||
|
"security_context": {
|
||||||
|
"namespace_options": {
|
||||||
|
"host_network": false,
|
||||||
|
"host_pid": false,
|
||||||
|
"host_ipc": false
|
||||||
|
},
|
||||||
|
"selinux_options": {
|
||||||
|
"user": "system_u",
|
||||||
|
"role": "system_r",
|
||||||
|
"type": "svirt_lxc_net_t",
|
||||||
|
"level": "s0:c4,c5"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
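The new sandbox_config_sysctl.json above carries sysctls in the sandbox's `linux.sysctls` map rather than in the `security.alpha.kubernetes.io/*sysctls` annotations that the other fixtures drop. A minimal sketch of the same configuration built directly against the CRI v1alpha1 types (field names assumed from the generated `runtime` package that crioctl unmarshals this fixture into; illustrative only, not part of this commit):

```go
package main

import (
	"fmt"

	pb "k8s.io/kubernetes/pkg/kubelet/apis/cri/v1alpha1/runtime"
)

func main() {
	// Same values as test/testdata/sandbox_config_sysctl.json, expressed in Go.
	cfg := &pb.PodSandboxConfig{
		Metadata: &pb.PodSandboxMetadata{
			Name:      "podsandbox1",
			Uid:       "redhat-test-crio",
			Namespace: "redhat.test.crio",
			Attempt:   1,
		},
		Linux: &pb.LinuxPodSandboxConfig{
			CgroupParent: "/Burstable/pod_123-456",
			// Sysctls now travel as a first-class CRI field instead of annotations.
			Sysctls: map[string]string{
				"kernel.shm_rmid_forced":       "1",
				"net.ipv4.ip_local_port_range": "1024 65000",
				"kernel.msgmax":                "8192",
			},
		},
	}
	fmt.Println(cfg.Linux.Sysctls)
}
```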
2 test/testdata/template_sandbox_config.json vendored
|
@ -28,8 +28,6 @@
|
||||||
},
|
},
|
||||||
"annotations": {
|
"annotations": {
|
||||||
"owner": "hmeng",
|
"owner": "hmeng",
|
||||||
"security.alpha.kubernetes.io/sysctls": "kernel.shm_rmid_forced=1,net.ipv4.ip_local_port_range=1024 65000",
|
|
||||||
"security.alpha.kubernetes.io/unsafe-sysctls": "kernel.msgmax=8192" ,
|
|
||||||
"security.alpha.kubernetes.io/seccomp/pod": "unconfined"
|
"security.alpha.kubernetes.io/seccomp/pod": "unconfined"
|
||||||
},
|
},
|
||||||
"linux": {
|
"linux": {
|
||||||
|
|
21 vendor.conf
|
@ -1,8 +1,16 @@
|
||||||
k8s.io/kubernetes v1.7.8 https://github.com/kubernetes/kubernetes
|
k8s.io/kubernetes v1.8.1 https://github.com/kubernetes/kubernetes
|
||||||
k8s.io/client-go release-4.0 https://github.com/kubernetes/client-go
|
k8s.io/client-go release-5.0 https://github.com/kubernetes/client-go
|
||||||
k8s.io/apimachinery release-1.7 https://github.com/kubernetes/apimachinery
|
k8s.io/apimachinery release-1.8 https://github.com/kubernetes/apimachinery
|
||||||
k8s.io/apiserver release-1.7 https://github.com/kubernetes/apiserver
|
k8s.io/apiserver release-1.8 https://github.com/kubernetes/apiserver
|
||||||
|
k8s.io/utils 4fe312863be2155a7b68acd2aff1c9221b24e68c https://github.com/kubernetes/utils
|
||||||
|
k8s.io/api release-1.8 https://github.com/kubernetes/api
|
||||||
|
k8s.io/kube-openapi abfc5fbe1cf87ee697db107fdfd24c32fe4397a8 https://github.com/kubernetes/kube-openapi
|
||||||
|
k8s.io/apiextensions-apiserver release-1.8 https://github.com/kubernetes/apiextensions-apiserver
|
||||||
#
|
#
|
||||||
|
github.com/googleapis/gnostic 0c5108395e2debce0d731cf0287ddf7242066aba
|
||||||
|
github.com/gregjones/httpcache 787624de3eb7bd915c329cba748687a3b22666a6
|
||||||
|
github.com/json-iterator/go 1.0.0
|
||||||
|
github.com/peterbourgon/diskv v2.0.1
|
||||||
github.com/sirupsen/logrus v1.0.0
|
github.com/sirupsen/logrus v1.0.0
|
||||||
github.com/containers/image storage-update https://github.com/nalind/image
|
github.com/containers/image storage-update https://github.com/nalind/image
|
||||||
github.com/docker/docker-credential-helpers d68f9aeca33f5fd3f08eeae5e9d175edf4e731d1
|
github.com/docker/docker-credential-helpers d68f9aeca33f5fd3f08eeae5e9d175edf4e731d1
|
||||||
|
@ -59,8 +67,9 @@ golang.org/x/net c427ad74c6d7a814201695e9ffde0c5d400a7674
|
||||||
golang.org/x/sys 9aade4d3a3b7e6d876cd3823ad20ec45fc035402
|
golang.org/x/sys 9aade4d3a3b7e6d876cd3823ad20ec45fc035402
|
||||||
golang.org/x/text f72d8390a633d5dfb0cc84043294db9f6c935756
|
golang.org/x/text f72d8390a633d5dfb0cc84043294db9f6c935756
|
||||||
github.com/kr/pty v1.0.0
|
github.com/kr/pty v1.0.0
|
||||||
github.com/gogo/protobuf v0.3
|
github.com/google/btree 7d79101e329e5a3adf994758c578dab82b90c017
|
||||||
github.com/golang/protobuf 748d386b5c1ea99658fd69fe9f03991ce86a90c1
|
github.com/gogo/protobuf c0656edd0d9eab7c66d1eb0c568f9039345796f7
|
||||||
|
github.com/golang/protobuf 4bd1920723d7b7c925de087aa32e2187708897f7
|
||||||
github.com/coreos/go-systemd v14
|
github.com/coreos/go-systemd v14
|
||||||
github.com/coreos/pkg v3
|
github.com/coreos/pkg v3
|
||||||
github.com/golang/groupcache b710c8433bd175204919eb38776e944233235d03
|
github.com/golang/groupcache b710c8433bd175204919eb38776e944233235d03
|
||||||
|
|
78 vendor/github.com/docker/docker/pkg/templates/templates.go generated vendored Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
package templates
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"strings"
|
||||||
|
"text/template"
|
||||||
|
)
|
||||||
|
|
||||||
|
// basicFunctions are the set of initial
|
||||||
|
// functions provided to every template.
|
||||||
|
var basicFunctions = template.FuncMap{
|
||||||
|
"json": func(v interface{}) string {
|
||||||
|
buf := &bytes.Buffer{}
|
||||||
|
enc := json.NewEncoder(buf)
|
||||||
|
enc.SetEscapeHTML(false)
|
||||||
|
enc.Encode(v)
|
||||||
|
// Remove the trailing new line added by the encoder
|
||||||
|
return strings.TrimSpace(buf.String())
|
||||||
|
},
|
||||||
|
"split": strings.Split,
|
||||||
|
"join": strings.Join,
|
||||||
|
"title": strings.Title,
|
||||||
|
"lower": strings.ToLower,
|
||||||
|
"upper": strings.ToUpper,
|
||||||
|
"pad": padWithSpace,
|
||||||
|
"truncate": truncateWithLength,
|
||||||
|
}
|
||||||
|
|
||||||
|
// HeaderFunctions are used to create the headers of a table.
|
||||||
|
// This is a replacement of basicFunctions for header generation
|
||||||
|
// because we want the header to remain intact.
|
||||||
|
// Some functions like `split` are irrelevant so not added.
|
||||||
|
var HeaderFunctions = template.FuncMap{
|
||||||
|
"json": func(v string) string {
|
||||||
|
return v
|
||||||
|
},
|
||||||
|
"title": func(v string) string {
|
||||||
|
return v
|
||||||
|
},
|
||||||
|
"lower": func(v string) string {
|
||||||
|
return v
|
||||||
|
},
|
||||||
|
"upper": func(v string) string {
|
||||||
|
return v
|
||||||
|
},
|
||||||
|
"truncate": func(v string, l int) string {
|
||||||
|
return v
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse creates a new anonymous template with the basic functions
|
||||||
|
// and parses the given format.
|
||||||
|
func Parse(format string) (*template.Template, error) {
|
||||||
|
return NewParse("", format)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewParse creates a new tagged template with the basic functions
|
||||||
|
// and parses the given format.
|
||||||
|
func NewParse(tag, format string) (*template.Template, error) {
|
||||||
|
return template.New(tag).Funcs(basicFunctions).Parse(format)
|
||||||
|
}
|
||||||
|
|
||||||
|
// padWithSpace adds whitespace to the input if the input is non-empty
|
||||||
|
func padWithSpace(source string, prefix, suffix int) string {
|
||||||
|
if source == "" {
|
||||||
|
return source
|
||||||
|
}
|
||||||
|
return strings.Repeat(" ", prefix) + source + strings.Repeat(" ", suffix)
|
||||||
|
}
|
||||||
|
|
||||||
|
// truncateWithLength truncates the source string up to the length provided by the input
|
||||||
|
func truncateWithLength(source string, length int) string {
|
||||||
|
if len(source) < length {
|
||||||
|
return source
|
||||||
|
}
|
||||||
|
return source[:length]
|
||||||
|
}
|
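The newly vendored templates package above is a thin wrapper around text/template plus a small FuncMap. A minimal usage sketch (illustrative only, not part of this commit) of the exported Parse with the "json" and "upper" helpers defined in basicFunctions:

```go
package main

import (
	"os"

	"github.com/docker/docker/pkg/templates"
)

func main() {
	// "json" and "upper" come from basicFunctions in the vendored package.
	tmpl, err := templates.Parse(`{{json .}} name={{.Name | upper}}`)
	if err != nil {
		panic(err)
	}
	// Execute the parsed template against an arbitrary value and print the result.
	if err := tmpl.Execute(os.Stdout, struct{ Name string }{Name: "cri-o"}); err != nil {
		panic(err)
	}
}
```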
66 vendor/github.com/exponent-io/jsonpath/README.md generated vendored
|
@ -1,66 +0,0 @@
|
||||||
[![GoDoc](https://godoc.org/github.com/exponent-io/jsonpath?status.svg)](https://godoc.org/github.com/exponent-io/jsonpath)
|
|
||||||
[![Build Status](https://travis-ci.org/exponent-io/jsonpath.svg?branch=master)](https://travis-ci.org/exponent-io/jsonpath)
|
|
||||||
|
|
||||||
# jsonpath
|
|
||||||
|
|
||||||
This package extends the [json.Decoder](https://golang.org/pkg/encoding/json/#Decoder) to support navigating a stream of JSON tokens. You should be able to use this extended Decoder places where a json.Decoder would have been used.
|
|
||||||
|
|
||||||
This Decoder has the following enhancements...
|
|
||||||
* The [Scan](https://godoc.org/github.com/exponent-io/jsonpath/#Decoder.Scan) method supports scanning a JSON stream while extracting particular values along the way using [PathActions](https://godoc.org/github.com/exponent-io/jsonpath#PathActions).
|
|
||||||
* The [SeekTo](https://godoc.org/github.com/exponent-io/jsonpath#Decoder.SeekTo) method supports seeking forward in a JSON token stream to a particular path.
|
|
||||||
* The [Path](https://godoc.org/github.com/exponent-io/jsonpath#Decoder.Path) method returns the path of the most recently parsed token.
|
|
||||||
* The [Token](https://godoc.org/github.com/exponent-io/jsonpath#Decoder.Token) method has been modified to distinguish between strings that are object keys and strings that are values. Object key strings are returned as the [KeyString](https://godoc.org/github.com/exponent-io/jsonpath#KeyString) type rather than a native string.
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
go get -u github.com/exponent-io/jsonpath
|
|
||||||
|
|
||||||
## Example Usage
|
|
||||||
|
|
||||||
#### SeekTo
|
|
||||||
|
|
||||||
```go
|
|
||||||
import "github.com/exponent-io/jsonpath"
|
|
||||||
|
|
||||||
var j = []byte(`[
|
|
||||||
{"Space": "YCbCr", "Point": {"Y": 255, "Cb": 0, "Cr": -10}},
|
|
||||||
{"Space": "RGB", "Point": {"R": 98, "G": 218, "B": 255}}
|
|
||||||
]`)
|
|
||||||
|
|
||||||
w := json.NewDecoder(bytes.NewReader(j))
|
|
||||||
var v interface{}
|
|
||||||
|
|
||||||
w.SeekTo(1, "Point", "G")
|
|
||||||
w.Decode(&v) // v is 218
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Scan with PathActions
|
|
||||||
|
|
||||||
```go
|
|
||||||
var j = []byte(`{"colors":[
|
|
||||||
{"Space": "YCbCr", "Point": {"Y": 255, "Cb": 0, "Cr": -10, "A": 58}},
|
|
||||||
{"Space": "RGB", "Point": {"R": 98, "G": 218, "B": 255, "A": 231}}
|
|
||||||
]}`)
|
|
||||||
|
|
||||||
var actions PathActions
|
|
||||||
|
|
||||||
// Extract the value at Point.A
|
|
||||||
actions.Add(func(d *Decoder) error {
|
|
||||||
var alpha int
|
|
||||||
err := d.Decode(&alpha)
|
|
||||||
fmt.Printf("Alpha: %v\n", alpha)
|
|
||||||
return err
|
|
||||||
}, "Point", "A")
|
|
||||||
|
|
||||||
w := NewDecoder(bytes.NewReader(j))
|
|
||||||
w.SeekTo("colors", 0)
|
|
||||||
|
|
||||||
var ok = true
|
|
||||||
var err error
|
|
||||||
for ok {
|
|
||||||
ok, err = w.Scan(&actions)
|
|
||||||
if err != nil && err != io.EOF {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
210 vendor/github.com/exponent-io/jsonpath/decoder.go generated vendored
|
@ -1,210 +0,0 @@
|
||||||
package jsonpath
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"io"
|
|
||||||
)
|
|
||||||
|
|
||||||
// KeyString is returned from Decoder.Token to represent each key in a JSON object value.
|
|
||||||
type KeyString string
|
|
||||||
|
|
||||||
// Decoder extends the Go runtime's encoding/json.Decoder to support navigating in a stream of JSON tokens.
|
|
||||||
type Decoder struct {
|
|
||||||
json.Decoder
|
|
||||||
|
|
||||||
path JsonPath
|
|
||||||
context jsonContext
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewDecoder creates a new instance of the extended JSON Decoder.
|
|
||||||
func NewDecoder(r io.Reader) *Decoder {
|
|
||||||
return &Decoder{Decoder: *json.NewDecoder(r)}
|
|
||||||
}
|
|
||||||
|
|
||||||
// SeekTo causes the Decoder to move forward to a given path in the JSON structure.
|
|
||||||
//
|
|
||||||
// The path argument must consist of strings or integers. Each string specifies a JSON object key, and
|
|
||||||
// each integer specifies an index into a JSON array.
|
|
||||||
//
|
|
||||||
// Consider the JSON structure
|
|
||||||
//
|
|
||||||
// { "a": [0,"s",12e4,{"b":0,"v":35} ] }
|
|
||||||
//
|
|
||||||
// SeekTo("a",3,"v") will move to the value referenced by the "a" key in the current object,
|
|
||||||
// followed by a move to the 4th value (index 3) in the array, followed by a move to the value at key "v".
|
|
||||||
// In this example, a subsequent call to the decoder's Decode() would unmarshal the value 35.
|
|
||||||
//
|
|
||||||
// SeekTo returns a boolean value indicating whether a match was found.
|
|
||||||
//
|
|
||||||
// Decoder is intended to be used with a stream of tokens. As a result it navigates forward only.
|
|
||||||
func (d *Decoder) SeekTo(path ...interface{}) (bool, error) {
|
|
||||||
|
|
||||||
if len(path) == 0 {
|
|
||||||
return len(d.path) == 0, nil
|
|
||||||
}
|
|
||||||
last := len(path) - 1
|
|
||||||
if i, ok := path[last].(int); ok {
|
|
||||||
path[last] = i - 1
|
|
||||||
}
|
|
||||||
|
|
||||||
for {
|
|
||||||
if d.path.Equal(path) {
|
|
||||||
return true, nil
|
|
||||||
}
|
|
||||||
_, err := d.Token()
|
|
||||||
if err == io.EOF {
|
|
||||||
return false, nil
|
|
||||||
} else if err != nil {
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Decode reads the next JSON-encoded value from its input and stores it in the value pointed to by v. This is
|
|
||||||
// equivalent to encoding/json.Decode().
|
|
||||||
func (d *Decoder) Decode(v interface{}) error {
|
|
||||||
switch d.context {
|
|
||||||
case objValue:
|
|
||||||
d.context = objKey
|
|
||||||
break
|
|
||||||
case arrValue:
|
|
||||||
d.path.incTop()
|
|
||||||
break
|
|
||||||
}
|
|
||||||
return d.Decoder.Decode(v)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Path returns a slice of string and/or int values representing the path from the root of the JSON object to the
|
|
||||||
// position of the most-recently parsed token.
|
|
||||||
func (d *Decoder) Path() JsonPath {
|
|
||||||
p := make(JsonPath, len(d.path))
|
|
||||||
copy(p, d.path)
|
|
||||||
return p
|
|
||||||
}
|
|
||||||
|
|
||||||
// Token is equivalent to the Token() method on json.Decoder. The primary difference is that it distinguishes
|
|
||||||
// between strings that are keys and strings that are values. String tokens that are object keys are returned as a
|
|
||||||
// KeyString rather than as a native string.
|
|
||||||
func (d *Decoder) Token() (json.Token, error) {
|
|
||||||
t, err := d.Decoder.Token()
|
|
||||||
if err != nil {
|
|
||||||
return t, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if t == nil {
|
|
||||||
switch d.context {
|
|
||||||
case objValue:
|
|
||||||
d.context = objKey
|
|
||||||
break
|
|
||||||
case arrValue:
|
|
||||||
d.path.incTop()
|
|
||||||
break
|
|
||||||
}
|
|
||||||
return t, err
|
|
||||||
}
|
|
||||||
|
|
||||||
switch t := t.(type) {
|
|
||||||
case json.Delim:
|
|
||||||
switch t {
|
|
||||||
case json.Delim('{'):
|
|
||||||
if d.context == arrValue {
|
|
||||||
d.path.incTop()
|
|
||||||
}
|
|
||||||
d.path.push("")
|
|
||||||
d.context = objKey
|
|
||||||
break
|
|
||||||
case json.Delim('}'):
|
|
||||||
d.path.pop()
|
|
||||||
d.context = d.path.inferContext()
|
|
||||||
break
|
|
||||||
case json.Delim('['):
|
|
||||||
if d.context == arrValue {
|
|
||||||
d.path.incTop()
|
|
||||||
}
|
|
||||||
d.path.push(-1)
|
|
||||||
d.context = arrValue
|
|
||||||
break
|
|
||||||
case json.Delim(']'):
|
|
||||||
d.path.pop()
|
|
||||||
d.context = d.path.inferContext()
|
|
||||||
break
|
|
||||||
}
|
|
||||||
case float64, json.Number, bool:
|
|
||||||
switch d.context {
|
|
||||||
case objValue:
|
|
||||||
d.context = objKey
|
|
||||||
break
|
|
||||||
case arrValue:
|
|
||||||
d.path.incTop()
|
|
||||||
break
|
|
||||||
}
|
|
||||||
break
|
|
||||||
case string:
|
|
||||||
switch d.context {
|
|
||||||
case objKey:
|
|
||||||
d.path.nameTop(t)
|
|
||||||
d.context = objValue
|
|
||||||
return KeyString(t), err
|
|
||||||
case objValue:
|
|
||||||
d.context = objKey
|
|
||||||
case arrValue:
|
|
||||||
d.path.incTop()
|
|
||||||
}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
|
|
||||||
return t, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Scan moves forward over the JSON stream consuming all the tokens at the current level (current object, current array)
|
|
||||||
// invoking each matching PathAction along the way.
|
|
||||||
//
|
|
||||||
// Scan returns true if there are more contiguous values to scan (for example in an array).
|
|
||||||
func (d *Decoder) Scan(ext *PathActions) (bool, error) {
|
|
||||||
|
|
||||||
rootPath := d.Path()
|
|
||||||
|
|
||||||
// If this is an array path, increment the root path in our local copy.
|
|
||||||
if rootPath.inferContext() == arrValue {
|
|
||||||
rootPath.incTop()
|
|
||||||
}
|
|
||||||
|
|
||||||
for {
|
|
||||||
// advance the token position
|
|
||||||
_, err := d.Token()
|
|
||||||
if err != nil {
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
|
|
||||||
match:
|
|
||||||
var relPath JsonPath
|
|
||||||
|
|
||||||
// capture the new JSON path
|
|
||||||
path := d.Path()
|
|
||||||
|
|
||||||
if len(path) > len(rootPath) {
|
|
||||||
// capture the path relative to where the scan started
|
|
||||||
relPath = path[len(rootPath):]
|
|
||||||
} else {
|
|
||||||
// if the path is not longer than the root, then we are done with this scan
|
|
||||||
// return boolean flag indicating if there are more items to scan at the same level
|
|
||||||
return d.Decoder.More(), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// match the relative path against the path actions
|
|
||||||
if node := ext.node.match(relPath); node != nil {
|
|
||||||
if node.action != nil {
|
|
||||||
// we have a match so execute the action
|
|
||||||
err = node.action(d)
|
|
||||||
if err != nil {
|
|
||||||
return d.Decoder.More(), err
|
|
||||||
}
|
|
||||||
// The action may have advanced the decoder. If we are in an array, advancing it further would
|
|
||||||
// skip tokens. So, if we are scanning an array, jump to the top without advancing the token.
|
|
||||||
if d.path.inferContext() == arrValue && d.Decoder.More() {
|
|
||||||
goto match
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
67 vendor/github.com/exponent-io/jsonpath/path.go generated vendored
|
@ -1,67 +0,0 @@
|
||||||
// Extends the Go runtime's json.Decoder enabling navigation of a stream of json tokens.
|
|
||||||
package jsonpath
|
|
||||||
|
|
||||||
import "fmt"
|
|
||||||
|
|
||||||
type jsonContext int
|
|
||||||
|
|
||||||
const (
|
|
||||||
none jsonContext = iota
|
|
||||||
objKey
|
|
||||||
objValue
|
|
||||||
arrValue
|
|
||||||
)
|
|
||||||
|
|
||||||
// AnyIndex can be used in a pattern to match any array index.
|
|
||||||
const AnyIndex = -2
|
|
||||||
|
|
||||||
// JsonPath is a slice of strings and/or integers. Each string specifies a JSON object key, and
|
|
||||||
// each integer specifies an index into a JSON array.
|
|
||||||
type JsonPath []interface{}
|
|
||||||
|
|
||||||
func (p *JsonPath) push(n interface{}) { *p = append(*p, n) }
|
|
||||||
func (p *JsonPath) pop() { *p = (*p)[:len(*p)-1] }
|
|
||||||
|
|
||||||
// increment the index at the top of the stack (must be an array index)
|
|
||||||
func (p *JsonPath) incTop() { (*p)[len(*p)-1] = (*p)[len(*p)-1].(int) + 1 }
|
|
||||||
|
|
||||||
// name the key at the top of the stack (must be an object key)
|
|
||||||
func (p *JsonPath) nameTop(n string) { (*p)[len(*p)-1] = n }
|
|
||||||
|
|
||||||
// infer the context from the item at the top of the stack
|
|
||||||
func (p *JsonPath) inferContext() jsonContext {
|
|
||||||
if len(*p) == 0 {
|
|
||||||
return none
|
|
||||||
}
|
|
||||||
t := (*p)[len(*p)-1]
|
|
||||||
switch t.(type) {
|
|
||||||
case string:
|
|
||||||
return objKey
|
|
||||||
case int:
|
|
||||||
return arrValue
|
|
||||||
default:
|
|
||||||
panic(fmt.Sprintf("Invalid stack type %T", t))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Equal tests for equality between two JsonPath types.
|
|
||||||
func (p *JsonPath) Equal(o JsonPath) bool {
|
|
||||||
if len(*p) != len(o) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
for i, v := range *p {
|
|
||||||
if v != o[i] {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *JsonPath) HasPrefix(o JsonPath) bool {
|
|
||||||
for i, v := range o {
|
|
||||||
if v != (*p)[i] {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
61 vendor/github.com/exponent-io/jsonpath/pathaction.go generated vendored
|
@ -1,61 +0,0 @@
|
||||||
package jsonpath
|
|
||||||
|
|
||||||
// pathNode is used to construct a trie of paths to be matched
|
|
||||||
type pathNode struct {
|
|
||||||
matchOn interface{} // string, or integer
|
|
||||||
childNodes []pathNode
|
|
||||||
action DecodeAction
|
|
||||||
}
|
|
||||||
|
|
||||||
// match climbs the trie to find a node that matches the given JSON path.
|
|
||||||
func (n *pathNode) match(path JsonPath) *pathNode {
|
|
||||||
var node *pathNode = n
|
|
||||||
for _, ps := range path {
|
|
||||||
found := false
|
|
||||||
for i, n := range node.childNodes {
|
|
||||||
if n.matchOn == ps {
|
|
||||||
node = &node.childNodes[i]
|
|
||||||
found = true
|
|
||||||
break
|
|
||||||
} else if _, ok := ps.(int); ok && n.matchOn == AnyIndex {
|
|
||||||
node = &node.childNodes[i]
|
|
||||||
found = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !found {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return node
|
|
||||||
}
|
|
||||||
|
|
||||||
// PathActions represents a collection of DecodeAction functions that should be called at certain path positions
|
|
||||||
// when scanning the JSON stream. PathActions can be created once and used many times in one or more JSON streams.
|
|
||||||
type PathActions struct {
|
|
||||||
node pathNode
|
|
||||||
}
|
|
||||||
|
|
||||||
// DecodeAction handlers are called by the Decoder when scanning objects. See PathActions.Add for more detail.
|
|
||||||
type DecodeAction func(d *Decoder) error
|
|
||||||
|
|
||||||
// Add specifies an action to call on the Decoder when the specified path is encountered.
|
|
||||||
func (je *PathActions) Add(action DecodeAction, path ...interface{}) {
|
|
||||||
|
|
||||||
var node *pathNode = &je.node
|
|
||||||
for _, ps := range path {
|
|
||||||
found := false
|
|
||||||
for i, n := range node.childNodes {
|
|
||||||
if n.matchOn == ps {
|
|
||||||
node = &node.childNodes[i]
|
|
||||||
found = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !found {
|
|
||||||
node.childNodes = append(node.childNodes, pathNode{matchOn: ps})
|
|
||||||
node = &node.childNodes[len(node.childNodes)-1]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
node.action = action
|
|
||||||
}
|
|
6 vendor/github.com/go-openapi/analysis/README.md generated vendored
|
@ -1,6 +0,0 @@
|
||||||
# OpenAPI initiative analysis [![Build Status](https://ci.vmware.run/api/badges/go-openapi/analysis/status.svg)](https://ci.vmware.run/go-openapi/analysis) [![Coverage](https://coverage.vmware.run/badges/go-openapi/analysis/coverage.svg)](https://coverage.vmware.run/go-openapi/analysis) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
|
|
||||||
|
|
||||||
[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/analysis/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/analysis?status.svg)](http://godoc.org/github.com/go-openapi/analysis)
|
|
||||||
|
|
||||||
|
|
||||||
A foundational library to analyze an OAI specification document for easier reasoning about the content.
|
|
614 vendor/github.com/go-openapi/analysis/analyzer.go generated vendored
|
@ -1,614 +0,0 @@
|
||||||
// Copyright 2015 go-swagger maintainers
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package analysis
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
slashpath "path"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/go-openapi/jsonpointer"
|
|
||||||
"github.com/go-openapi/spec"
|
|
||||||
"github.com/go-openapi/swag"
|
|
||||||
)
|
|
||||||
|
|
||||||
type referenceAnalysis struct {
|
|
||||||
schemas map[string]spec.Ref
|
|
||||||
responses map[string]spec.Ref
|
|
||||||
parameters map[string]spec.Ref
|
|
||||||
items map[string]spec.Ref
|
|
||||||
allRefs map[string]spec.Ref
|
|
||||||
referenced struct {
|
|
||||||
schemas map[string]SchemaRef
|
|
||||||
responses map[string]*spec.Response
|
|
||||||
parameters map[string]*spec.Parameter
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *referenceAnalysis) addRef(key string, ref spec.Ref) {
|
|
||||||
r.allRefs["#"+key] = ref
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *referenceAnalysis) addItemsRef(key string, items *spec.Items) {
|
|
||||||
r.items["#"+key] = items.Ref
|
|
||||||
r.addRef(key, items.Ref)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *referenceAnalysis) addSchemaRef(key string, ref SchemaRef) {
|
|
||||||
r.schemas["#"+key] = ref.Schema.Ref
|
|
||||||
r.addRef(key, ref.Schema.Ref)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *referenceAnalysis) addResponseRef(key string, resp *spec.Response) {
|
|
||||||
r.responses["#"+key] = resp.Ref
|
|
||||||
r.addRef(key, resp.Ref)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *referenceAnalysis) addParamRef(key string, param *spec.Parameter) {
|
|
||||||
r.parameters["#"+key] = param.Ref
|
|
||||||
r.addRef(key, param.Ref)
|
|
||||||
}
|
|
||||||
|
|
||||||
// New takes a swagger spec object and returns an analyzed spec document.
|
|
||||||
// The analyzed document contains a number of indices that make it easier to
|
|
||||||
// reason about semantics of a swagger specification for use in code generation
|
|
||||||
// or validation etc.
|
|
||||||
func New(doc *spec.Swagger) *Spec {
|
|
||||||
a := &Spec{
|
|
||||||
spec: doc,
|
|
||||||
consumes: make(map[string]struct{}, 150),
|
|
||||||
produces: make(map[string]struct{}, 150),
|
|
||||||
authSchemes: make(map[string]struct{}, 150),
|
|
||||||
operations: make(map[string]map[string]*spec.Operation, 150),
|
|
||||||
allSchemas: make(map[string]SchemaRef, 150),
|
|
||||||
allOfs: make(map[string]SchemaRef, 150),
|
|
||||||
references: referenceAnalysis{
|
|
||||||
schemas: make(map[string]spec.Ref, 150),
|
|
||||||
responses: make(map[string]spec.Ref, 150),
|
|
||||||
parameters: make(map[string]spec.Ref, 150),
|
|
||||||
items: make(map[string]spec.Ref, 150),
|
|
||||||
allRefs: make(map[string]spec.Ref, 150),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
a.references.referenced.schemas = make(map[string]SchemaRef, 150)
|
|
||||||
a.references.referenced.responses = make(map[string]*spec.Response, 150)
|
|
||||||
a.references.referenced.parameters = make(map[string]*spec.Parameter, 150)
|
|
||||||
a.initialize()
|
|
||||||
return a
|
|
||||||
}
|
|
||||||
|
|
||||||
// Spec takes a swagger spec object and turns it into a registry
|
|
||||||
// with a bunch of utility methods to act on the information in the spec
|
|
||||||
type Spec struct {
|
|
||||||
spec *spec.Swagger
|
|
||||||
consumes map[string]struct{}
|
|
||||||
produces map[string]struct{}
|
|
||||||
authSchemes map[string]struct{}
|
|
||||||
operations map[string]map[string]*spec.Operation
|
|
||||||
references referenceAnalysis
|
|
||||||
allSchemas map[string]SchemaRef
|
|
||||||
allOfs map[string]SchemaRef
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Spec) initialize() {
|
|
||||||
for _, c := range s.spec.Consumes {
|
|
||||||
s.consumes[c] = struct{}{}
|
|
||||||
}
|
|
||||||
for _, c := range s.spec.Produces {
|
|
||||||
s.produces[c] = struct{}{}
|
|
||||||
}
|
|
||||||
for _, ss := range s.spec.Security {
|
|
||||||
for k := range ss {
|
|
||||||
s.authSchemes[k] = struct{}{}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for path, pathItem := range s.AllPaths() {
|
|
||||||
s.analyzeOperations(path, &pathItem)
|
|
||||||
}
|
|
||||||
|
|
||||||
for name, parameter := range s.spec.Parameters {
|
|
||||||
refPref := slashpath.Join("/parameters", jsonpointer.Escape(name))
|
|
||||||
if parameter.Items != nil {
|
|
||||||
s.analyzeItems("items", parameter.Items, refPref)
|
|
||||||
}
|
|
||||||
if parameter.In == "body" && parameter.Schema != nil {
|
|
||||||
s.analyzeSchema("schema", *parameter.Schema, refPref)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for name, response := range s.spec.Responses {
|
|
||||||
refPref := slashpath.Join("/responses", jsonpointer.Escape(name))
|
|
||||||
for _, v := range response.Headers {
|
|
||||||
if v.Items != nil {
|
|
||||||
s.analyzeItems("items", v.Items, refPref)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if response.Schema != nil {
|
|
||||||
s.analyzeSchema("schema", *response.Schema, refPref)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for name, schema := range s.spec.Definitions {
|
|
||||||
s.analyzeSchema(name, schema, "/definitions")
|
|
||||||
}
|
|
||||||
// TODO: after analyzing all things and flattening schemas etc
|
|
||||||
// resolve all the collected references to their final representations
|
|
||||||
// best put in a separate method because this could get expensive
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Spec) analyzeOperations(path string, pi *spec.PathItem) {
|
|
||||||
// TODO: resolve refs here?
|
|
||||||
op := pi
|
|
||||||
s.analyzeOperation("GET", path, op.Get)
|
|
||||||
s.analyzeOperation("PUT", path, op.Put)
|
|
||||||
s.analyzeOperation("POST", path, op.Post)
|
|
||||||
s.analyzeOperation("PATCH", path, op.Patch)
|
|
||||||
s.analyzeOperation("DELETE", path, op.Delete)
|
|
||||||
s.analyzeOperation("HEAD", path, op.Head)
|
|
||||||
s.analyzeOperation("OPTIONS", path, op.Options)
|
|
||||||
for i, param := range op.Parameters {
|
|
||||||
refPref := slashpath.Join("/paths", jsonpointer.Escape(path), "parameters", strconv.Itoa(i))
|
|
||||||
if param.Ref.String() != "" {
|
|
||||||
s.references.addParamRef(refPref, ¶m)
|
|
||||||
}
|
|
||||||
if param.Items != nil {
|
|
||||||
s.analyzeItems("items", param.Items, refPref)
|
|
||||||
}
|
|
||||||
if param.Schema != nil {
|
|
||||||
s.analyzeSchema("schema", *param.Schema, refPref)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Spec) analyzeItems(name string, items *spec.Items, prefix string) {
|
|
||||||
if items == nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
refPref := slashpath.Join(prefix, name)
|
|
||||||
s.analyzeItems(name, items.Items, refPref)
|
|
||||||
if items.Ref.String() != "" {
|
|
||||||
s.references.addItemsRef(refPref, items)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Spec) analyzeOperation(method, path string, op *spec.Operation) {
|
|
||||||
if op == nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, c := range op.Consumes {
|
|
||||||
s.consumes[c] = struct{}{}
|
|
||||||
}
|
|
||||||
for _, c := range op.Produces {
|
|
||||||
s.produces[c] = struct{}{}
|
|
||||||
}
|
|
||||||
for _, ss := range op.Security {
|
|
||||||
for k := range ss {
|
|
||||||
s.authSchemes[k] = struct{}{}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if _, ok := s.operations[method]; !ok {
|
|
||||||
s.operations[method] = make(map[string]*spec.Operation)
|
|
||||||
}
|
|
||||||
s.operations[method][path] = op
|
|
||||||
prefix := slashpath.Join("/paths", jsonpointer.Escape(path), strings.ToLower(method))
|
|
||||||
for i, param := range op.Parameters {
|
|
||||||
refPref := slashpath.Join(prefix, "parameters", strconv.Itoa(i))
|
|
||||||
if param.Ref.String() != "" {
|
|
||||||
s.references.addParamRef(refPref, ¶m)
|
|
||||||
}
|
|
||||||
s.analyzeItems("items", param.Items, refPref)
|
|
||||||
if param.In == "body" && param.Schema != nil {
|
|
||||||
s.analyzeSchema("schema", *param.Schema, refPref)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if op.Responses != nil {
|
|
||||||
if op.Responses.Default != nil {
|
|
||||||
refPref := slashpath.Join(prefix, "responses", "default")
|
|
||||||
if op.Responses.Default.Ref.String() != "" {
|
|
||||||
s.references.addResponseRef(refPref, op.Responses.Default)
|
|
||||||
}
|
|
||||||
for _, v := range op.Responses.Default.Headers {
|
|
||||||
s.analyzeItems("items", v.Items, refPref)
|
|
||||||
}
|
|
||||||
if op.Responses.Default.Schema != nil {
|
|
||||||
s.analyzeSchema("schema", *op.Responses.Default.Schema, refPref)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for k, res := range op.Responses.StatusCodeResponses {
|
|
||||||
refPref := slashpath.Join(prefix, "responses", strconv.Itoa(k))
|
|
||||||
if res.Ref.String() != "" {
|
|
||||||
s.references.addResponseRef(refPref, &res)
|
|
||||||
}
|
|
||||||
for _, v := range res.Headers {
|
|
||||||
s.analyzeItems("items", v.Items, refPref)
|
|
||||||
}
|
|
||||||
if res.Schema != nil {
|
|
||||||
s.analyzeSchema("schema", *res.Schema, refPref)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Spec) analyzeSchema(name string, schema spec.Schema, prefix string) {
|
|
||||||
refURI := slashpath.Join(prefix, jsonpointer.Escape(name))
|
|
||||||
schRef := SchemaRef{
|
|
||||||
Name: name,
|
|
||||||
Schema: &schema,
|
|
||||||
Ref: spec.MustCreateRef("#" + refURI),
|
|
||||||
}
|
|
||||||
s.allSchemas["#"+refURI] = schRef
|
|
||||||
if schema.Ref.String() != "" {
|
|
||||||
s.references.addSchemaRef(refURI, schRef)
|
|
||||||
}
|
|
||||||
for k, v := range schema.Definitions {
|
|
||||||
s.analyzeSchema(k, v, slashpath.Join(refURI, "definitions"))
|
|
||||||
}
|
|
||||||
for k, v := range schema.Properties {
|
|
||||||
s.analyzeSchema(k, v, slashpath.Join(refURI, "properties"))
|
|
||||||
}
|
|
||||||
for k, v := range schema.PatternProperties {
|
|
||||||
s.analyzeSchema(k, v, slashpath.Join(refURI, "patternProperties"))
|
|
||||||
}
|
|
||||||
for i, v := range schema.AllOf {
|
|
||||||
s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "allOf"))
|
|
||||||
}
|
|
||||||
if len(schema.AllOf) > 0 {
|
|
||||||
s.allOfs["#"+refURI] = SchemaRef{Name: name, Schema: &schema, Ref: spec.MustCreateRef("#" + refURI)}
|
|
||||||
}
|
|
||||||
for i, v := range schema.AnyOf {
|
|
||||||
s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "anyOf"))
|
|
||||||
}
|
|
||||||
for i, v := range schema.OneOf {
|
|
||||||
s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "oneOf"))
|
|
||||||
}
|
|
||||||
if schema.Not != nil {
|
|
||||||
s.analyzeSchema("not", *schema.Not, refURI)
|
|
||||||
}
|
|
||||||
if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil {
|
|
||||||
s.analyzeSchema("additionalProperties", *schema.AdditionalProperties.Schema, refURI)
|
|
||||||
}
|
|
||||||
if schema.AdditionalItems != nil && schema.AdditionalItems.Schema != nil {
|
|
||||||
s.analyzeSchema("additionalItems", *schema.AdditionalItems.Schema, refURI)
|
|
||||||
}
|
|
||||||
if schema.Items != nil {
|
|
||||||
if schema.Items.Schema != nil {
|
|
||||||
s.analyzeSchema("items", *schema.Items.Schema, refURI)
|
|
||||||
}
|
|
||||||
for i, sch := range schema.Items.Schemas {
|
|
||||||
s.analyzeSchema(strconv.Itoa(i), sch, slashpath.Join(refURI, "items"))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// SecurityRequirement is a representation of a security requirement for an operation
|
|
||||||
type SecurityRequirement struct {
|
|
||||||
Name string
|
|
||||||
Scopes []string
|
|
||||||
}
|
|
||||||
|
|
||||||
// SecurityRequirementsFor gets the security requirements for the operation
|
|
||||||
func (s *Spec) SecurityRequirementsFor(operation *spec.Operation) []SecurityRequirement {
|
|
||||||
if s.spec.Security == nil && operation.Security == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
schemes := s.spec.Security
|
|
||||||
if operation.Security != nil {
|
|
||||||
schemes = operation.Security
|
|
||||||
}
|
|
||||||
|
|
||||||
unique := make(map[string]SecurityRequirement)
|
|
||||||
for _, scheme := range schemes {
|
|
||||||
for k, v := range scheme {
|
|
||||||
if _, ok := unique[k]; !ok {
|
|
||||||
unique[k] = SecurityRequirement{Name: k, Scopes: v}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var result []SecurityRequirement
|
|
||||||
for _, v := range unique {
|
|
||||||
result = append(result, v)
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// SecurityDefinitionsFor gets the matching security definitions for a set of requirements
|
|
||||||
func (s *Spec) SecurityDefinitionsFor(operation *spec.Operation) map[string]spec.SecurityScheme {
|
|
||||||
requirements := s.SecurityRequirementsFor(operation)
|
|
||||||
if len(requirements) == 0 {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
result := make(map[string]spec.SecurityScheme)
|
|
||||||
for _, v := range requirements {
|
|
||||||
if definition, ok := s.spec.SecurityDefinitions[v.Name]; ok {
|
|
||||||
if definition != nil {
|
|
||||||
result[v.Name] = *definition
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// ConsumesFor gets the mediatypes for the operation
|
|
||||||
func (s *Spec) ConsumesFor(operation *spec.Operation) []string {
|
|
||||||
|
|
||||||
if len(operation.Consumes) == 0 {
|
|
||||||
cons := make(map[string]struct{}, len(s.spec.Consumes))
|
|
||||||
for _, k := range s.spec.Consumes {
|
|
||||||
cons[k] = struct{}{}
|
|
||||||
}
|
|
||||||
return s.structMapKeys(cons)
|
|
||||||
}
|
|
||||||
|
|
||||||
cons := make(map[string]struct{}, len(operation.Consumes))
|
|
||||||
for _, c := range operation.Consumes {
|
|
||||||
cons[c] = struct{}{}
|
|
||||||
}
|
|
||||||
return s.structMapKeys(cons)
|
|
||||||
}
|
|
||||||
|
|
||||||
// ProducesFor gets the mediatypes for the operation
|
|
||||||
func (s *Spec) ProducesFor(operation *spec.Operation) []string {
|
|
||||||
if len(operation.Produces) == 0 {
|
|
||||||
prod := make(map[string]struct{}, len(s.spec.Produces))
|
|
||||||
for _, k := range s.spec.Produces {
|
|
||||||
prod[k] = struct{}{}
|
|
||||||
}
|
|
||||||
return s.structMapKeys(prod)
|
|
||||||
}
|
|
||||||
|
|
||||||
prod := make(map[string]struct{}, len(operation.Produces))
|
|
||||||
for _, c := range operation.Produces {
|
|
||||||
prod[c] = struct{}{}
|
|
||||||
}
|
|
||||||
return s.structMapKeys(prod)
|
|
||||||
}
|
|
||||||
|
|
||||||
func mapKeyFromParam(param *spec.Parameter) string {
|
|
||||||
return fmt.Sprintf("%s#%s", param.In, fieldNameFromParam(param))
|
|
||||||
}
|
|
||||||
|
|
||||||
func fieldNameFromParam(param *spec.Parameter) string {
|
|
||||||
if nm, ok := param.Extensions.GetString("go-name"); ok {
|
|
||||||
return nm
|
|
||||||
}
|
|
||||||
return swag.ToGoName(param.Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Spec) paramsAsMap(parameters []spec.Parameter, res map[string]spec.Parameter) {
|
|
||||||
for _, param := range parameters {
|
|
||||||
pr := param
|
|
||||||
if pr.Ref.String() != "" {
|
|
||||||
obj, _, err := pr.Ref.GetPointer().Get(s.spec)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
pr = obj.(spec.Parameter)
|
|
||||||
}
|
|
||||||
res[mapKeyFromParam(&pr)] = pr
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParametersFor the specified operation id
|
|
||||||
func (s *Spec) ParametersFor(operationID string) []spec.Parameter {
|
|
||||||
gatherParams := func(pi *spec.PathItem, op *spec.Operation) []spec.Parameter {
|
|
||||||
bag := make(map[string]spec.Parameter)
|
|
||||||
s.paramsAsMap(pi.Parameters, bag)
|
|
||||||
s.paramsAsMap(op.Parameters, bag)
|
|
||||||
|
|
||||||
var res []spec.Parameter
|
|
||||||
for _, v := range bag {
|
|
||||||
res = append(res, v)
|
|
||||||
}
|
|
||||||
return res
|
|
||||||
}
|
|
||||||
for _, pi := range s.spec.Paths.Paths {
|
|
||||||
if pi.Get != nil && pi.Get.ID == operationID {
|
|
||||||
return gatherParams(&pi, pi.Get)
|
|
||||||
}
|
|
||||||
if pi.Head != nil && pi.Head.ID == operationID {
|
|
||||||
return gatherParams(&pi, pi.Head)
|
|
||||||
}
|
|
||||||
if pi.Options != nil && pi.Options.ID == operationID {
|
|
||||||
return gatherParams(&pi, pi.Options)
|
|
||||||
}
|
|
||||||
if pi.Post != nil && pi.Post.ID == operationID {
|
|
||||||
return gatherParams(&pi, pi.Post)
|
|
||||||
}
|
|
||||||
if pi.Patch != nil && pi.Patch.ID == operationID {
|
|
||||||
return gatherParams(&pi, pi.Patch)
|
|
||||||
}
|
|
||||||
if pi.Put != nil && pi.Put.ID == operationID {
|
|
||||||
return gatherParams(&pi, pi.Put)
|
|
||||||
}
|
|
||||||
if pi.Delete != nil && pi.Delete.ID == operationID {
|
|
||||||
return gatherParams(&pi, pi.Delete)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParamsFor the specified method and path. Aggregates them with the defaults etc, so it's all the params that
|
|
||||||
// apply for the method and path.
|
|
||||||
func (s *Spec) ParamsFor(method, path string) map[string]spec.Parameter {
|
|
||||||
res := make(map[string]spec.Parameter)
|
|
||||||
if pi, ok := s.spec.Paths.Paths[path]; ok {
|
|
||||||
s.paramsAsMap(pi.Parameters, res)
|
|
||||||
s.paramsAsMap(s.operations[strings.ToUpper(method)][path].Parameters, res)
|
|
||||||
}
|
|
||||||
return res
|
|
||||||
}
|
|
||||||
|
|
||||||
// OperationForName gets the operation for the given id
|
|
||||||
func (s *Spec) OperationForName(operationID string) (string, string, *spec.Operation, bool) {
|
|
||||||
for method, pathItem := range s.operations {
|
|
||||||
for path, op := range pathItem {
|
|
||||||
if operationID == op.ID {
|
|
||||||
return method, path, op, true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return "", "", nil, false
|
|
||||||
}
|
|
||||||
|
|
||||||
// OperationFor the given method and path
|
|
||||||
func (s *Spec) OperationFor(method, path string) (*spec.Operation, bool) {
|
|
||||||
if mp, ok := s.operations[strings.ToUpper(method)]; ok {
|
|
||||||
op, fn := mp[path]
|
|
||||||
return op, fn
|
|
||||||
}
|
|
||||||
return nil, false
|
|
||||||
}
|
|
||||||
|
|
||||||
// Operations gathers all the operations specified in the spec document
|
|
||||||
func (s *Spec) Operations() map[string]map[string]*spec.Operation {
|
|
||||||
return s.operations
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Spec) structMapKeys(mp map[string]struct{}) []string {
|
|
||||||
if len(mp) == 0 {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
result := make([]string, 0, len(mp))
|
|
||||||
for k := range mp {
|
|
||||||
result = append(result, k)
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllPaths returns all the paths in the swagger spec
|
|
||||||
func (s *Spec) AllPaths() map[string]spec.PathItem {
|
|
||||||
if s.spec == nil || s.spec.Paths == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return s.spec.Paths.Paths
|
|
||||||
}
|
|
||||||
|
|
||||||
// OperationIDs gets all the operation ids based on method and path
|
|
||||||
func (s *Spec) OperationIDs() []string {
|
|
||||||
if len(s.operations) == 0 {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
result := make([]string, 0, len(s.operations))
|
|
||||||
for method, v := range s.operations {
|
|
||||||
for p, o := range v {
|
|
||||||
if o.ID != "" {
|
|
||||||
result = append(result, o.ID)
|
|
||||||
} else {
|
|
||||||
result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// RequiredConsumes gets all the distinct consumes that are specified in the specification document
|
|
||||||
func (s *Spec) RequiredConsumes() []string {
|
|
||||||
return s.structMapKeys(s.consumes)
|
|
||||||
}
|
|
||||||
|
|
||||||
// RequiredProduces gets all the distinct produces that are specified in the specification document
|
|
||||||
func (s *Spec) RequiredProduces() []string {
|
|
||||||
return s.structMapKeys(s.produces)
|
|
||||||
}
|
|
||||||
|
|
||||||
// RequiredSecuritySchemes gets all the distinct security schemes that are specified in the swagger spec
|
|
||||||
func (s *Spec) RequiredSecuritySchemes() []string {
|
|
||||||
return s.structMapKeys(s.authSchemes)
|
|
||||||
}
|
|
||||||
|
|
||||||
// SchemaRef is a reference to a schema
|
|
||||||
type SchemaRef struct {
|
|
||||||
Name string
|
|
||||||
Ref spec.Ref
|
|
||||||
Schema *spec.Schema
|
|
||||||
}
|
|
||||||
|
|
||||||
// SchemasWithAllOf returns schema references to all schemas that are defined
|
|
||||||
// with an allOf key
|
|
||||||
func (s *Spec) SchemasWithAllOf() (result []SchemaRef) {
|
|
||||||
for _, v := range s.allOfs {
|
|
||||||
result = append(result, v)
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllDefinitions returns schema references for all the definitions that were discovered
|
|
||||||
func (s *Spec) AllDefinitions() (result []SchemaRef) {
|
|
||||||
for _, v := range s.allSchemas {
|
|
||||||
result = append(result, v)
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllDefinitionReferences returns json refs for all the discovered schemas
|
|
||||||
func (s *Spec) AllDefinitionReferences() (result []string) {
|
|
||||||
for _, v := range s.references.schemas {
|
|
||||||
result = append(result, v.String())
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllParameterReferences returns json refs for all the discovered parameters
|
|
||||||
func (s *Spec) AllParameterReferences() (result []string) {
|
|
||||||
for _, v := range s.references.parameters {
|
|
||||||
result = append(result, v.String())
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllResponseReferences returns json refs for all the discovered responses
|
|
||||||
func (s *Spec) AllResponseReferences() (result []string) {
|
|
||||||
for _, v := range s.references.responses {
|
|
||||||
result = append(result, v.String())
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllItemsReferences returns the references for all the items
|
|
||||||
func (s *Spec) AllItemsReferences() (result []string) {
|
|
||||||
for _, v := range s.references.items {
|
|
||||||
result = append(result, v.String())
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllReferences returns all the references found in the document
|
|
||||||
func (s *Spec) AllReferences() (result []string) {
|
|
||||||
for _, v := range s.references.allRefs {
|
|
||||||
result = append(result, v.String())
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllRefs returns all the unique references found in the document
|
|
||||||
func (s *Spec) AllRefs() (result []spec.Ref) {
|
|
||||||
set := make(map[string]struct{})
|
|
||||||
for _, v := range s.references.allRefs {
|
|
||||||
a := v.String()
|
|
||||||
if a == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if _, ok := set[a]; !ok {
|
|
||||||
set[a] = struct{}{}
|
|
||||||
result = append(result, v)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
5 vendor/github.com/go-openapi/loads/README.md generated vendored
|
@ -1,5 +0,0 @@
|
||||||
# Loads OAI specs [![Build Status](https://ci.vmware.run/api/badges/go-openapi/loads/status.svg)](https://ci.vmware.run/go-openapi/loads) [![Coverage](https://coverage.vmware.run/badges/go-openapi/loads/coverage.svg)](https://coverage.vmware.run/go-openapi/loads) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
|
|
||||||
|
|
||||||
[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/loads/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/loads?status.svg)](http://godoc.org/github.com/go-openapi/loads)
|
|
||||||
|
|
||||||
Loading of OAI specification documents from local or remote locations.
|
|
203 vendor/github.com/go-openapi/loads/spec.go generated vendored
|
@ -1,203 +0,0 @@
|
||||||
// Copyright 2015 go-swagger maintainers
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package loads
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"net/url"
|
|
||||||
|
|
||||||
"github.com/go-openapi/analysis"
|
|
||||||
"github.com/go-openapi/spec"
|
|
||||||
"github.com/go-openapi/swag"
|
|
||||||
)
|
|
||||||
|
|
||||||
// JSONDoc loads a json document from either a file or a remote url
|
|
||||||
func JSONDoc(path string) (json.RawMessage, error) {
|
|
||||||
data, err := swag.LoadFromFileOrHTTP(path)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return json.RawMessage(data), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// DocLoader represents a doc loader type
|
|
||||||
type DocLoader func(string) (json.RawMessage, error)
|
|
||||||
|
|
||||||
// DocMatcher represents a predicate to check if a loader matches
|
|
||||||
type DocMatcher func(string) bool
|
|
||||||
|
|
||||||
var loaders = &loader{Match: func(_ string) bool { return true }, Fn: JSONDoc}
|
|
||||||
|
|
||||||
// AddLoader for a document
|
|
||||||
func AddLoader(predicate DocMatcher, load DocLoader) {
|
|
||||||
prev := loaders
|
|
||||||
loaders = &loader{
|
|
||||||
Match: predicate,
|
|
||||||
Fn: load,
|
|
||||||
Next: prev,
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
type loader struct {
|
|
||||||
Fn DocLoader
|
|
||||||
Match DocMatcher
|
|
||||||
Next *loader
|
|
||||||
}
|
|
||||||
|
|
||||||
// JSONSpec loads a spec from a json document
|
|
||||||
func JSONSpec(path string) (*Document, error) {
|
|
||||||
data, err := JSONDoc(path)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
// convert to json
|
|
||||||
return Analyzed(json.RawMessage(data), "")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Document represents a swagger spec document
|
|
||||||
type Document struct {
|
|
||||||
// specAnalyzer
|
|
||||||
Analyzer *analysis.Spec
|
|
||||||
spec *spec.Swagger
|
|
||||||
origSpec *spec.Swagger
|
|
||||||
schema *spec.Schema
|
|
||||||
raw json.RawMessage
|
|
||||||
}
|
|
||||||
|
|
||||||
// Spec loads a new spec document
|
|
||||||
func Spec(path string) (*Document, error) {
|
|
||||||
specURL, err := url.Parse(path)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
for l := loaders.Next; l != nil; l = l.Next {
|
|
||||||
if loaders.Match(specURL.Path) {
|
|
||||||
b, err2 := loaders.Fn(path)
|
|
||||||
if err2 != nil {
|
|
||||||
return nil, err2
|
|
||||||
}
|
|
||||||
return Analyzed(b, "")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
b, err := loaders.Fn(path)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return Analyzed(b, "")
|
|
||||||
}
|
|
||||||
|
|
||||||
var swag20Schema = spec.MustLoadSwagger20Schema()
|
|
||||||
|
|
||||||
// Analyzed creates a new analyzed spec document
|
|
||||||
func Analyzed(data json.RawMessage, version string) (*Document, error) {
|
|
||||||
if version == "" {
|
|
||||||
version = "2.0"
|
|
||||||
}
|
|
||||||
if version != "2.0" {
|
|
||||||
return nil, fmt.Errorf("spec version %q is not supported", version)
|
|
||||||
}
|
|
||||||
|
|
||||||
swspec := new(spec.Swagger)
|
|
||||||
if err := json.Unmarshal(data, swspec); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
origsqspec := new(spec.Swagger)
|
|
||||||
if err := json.Unmarshal(data, origsqspec); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
d := &Document{
|
|
||||||
Analyzer: analysis.New(swspec),
|
|
||||||
schema: swag20Schema,
|
|
||||||
spec: swspec,
|
|
||||||
raw: data,
|
|
||||||
origSpec: origsqspec,
|
|
||||||
}
|
|
||||||
return d, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Expanded expands the ref fields in the spec document and returns a new spec document
|
|
||||||
func (d *Document) Expanded() (*Document, error) {
|
|
||||||
swspec := new(spec.Swagger)
|
|
||||||
if err := json.Unmarshal(d.raw, swspec); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if err := spec.ExpandSpec(swspec); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
dd := &Document{
|
|
||||||
Analyzer: analysis.New(swspec),
|
|
||||||
spec: swspec,
|
|
||||||
schema: swag20Schema,
|
|
||||||
raw: d.raw,
|
|
||||||
origSpec: d.origSpec,
|
|
||||||
}
|
|
||||||
return dd, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// BasePath the base path for this spec
|
|
||||||
func (d *Document) BasePath() string {
|
|
||||||
return d.spec.BasePath
|
|
||||||
}
|
|
||||||
|
|
||||||
// Version returns the version of this spec
|
|
||||||
func (d *Document) Version() string {
|
|
||||||
return d.spec.Swagger
|
|
||||||
}
|
|
||||||
|
|
||||||
// Schema returns the swagger 2.0 schema
|
|
||||||
func (d *Document) Schema() *spec.Schema {
|
|
||||||
return d.schema
|
|
||||||
}
|
|
||||||
|
|
||||||
// Spec returns the swagger spec object model
|
|
||||||
func (d *Document) Spec() *spec.Swagger {
|
|
||||||
return d.spec
|
|
||||||
}
|
|
||||||
|
|
||||||
// Host returns the host for the API
|
|
||||||
func (d *Document) Host() string {
|
|
||||||
return d.spec.Host
|
|
||||||
}
|
|
||||||
|
|
||||||
// Raw returns the raw swagger spec as json bytes
|
|
||||||
func (d *Document) Raw() json.RawMessage {
|
|
||||||
return d.raw
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Document) OrigSpec() *spec.Swagger {
|
|
||||||
return d.origSpec
|
|
||||||
}
|
|
||||||
|
|
||||||
// ResetDefinitions gives a shallow copy with the models reset
|
|
||||||
func (d *Document) ResetDefinitions() *Document {
|
|
||||||
defs := make(map[string]spec.Schema, len(d.origSpec.Definitions))
|
|
||||||
for k, v := range d.origSpec.Definitions {
|
|
||||||
defs[k] = v
|
|
||||||
}
|
|
||||||
|
|
||||||
d.spec.Definitions = defs
|
|
||||||
return d
|
|
||||||
}
|
|
||||||
|
|
||||||
// Pristine creates a new pristine document instance based on the input data
|
|
||||||
func (d *Document) Pristine() *Document {
|
|
||||||
dd, _ := Analyzed(d.Raw(), d.Version())
|
|
||||||
return dd
|
|
||||||
}
|
|
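Note: the vendored go-openapi/loads package removed above provided the spec-loading entry points (Spec, Analyzed, Expanded) used by swagger tooling. A minimal sketch of how that API is typically consumed, for reference only; the file path below is a placeholder, not something taken from this commit:

```go
package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/loads"
)

func main() {
	// Load and analyze a swagger 2.0 document; the path is hypothetical.
	doc, err := loads.Spec("swagger.json")
	if err != nil {
		log.Fatal(err)
	}

	// Expanded resolves $ref entries into a new, self-contained document.
	expanded, err := doc.Expanded()
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println("version:", doc.Version(), "host:", expanded.Host())
}
```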
44  vendor/github.com/gogo/protobuf/README  generated vendored
@ -207,6 +207,50 @@ the --go_out argument to protoc:
 
 	protoc --gogo_out=plugins=grpc:. *.proto
 
+## Compatibility ##
+
+The library and the generated code are expected to be stable over time.
+However, we reserve the right to make breaking changes without notice for the
+following reasons:
+
+- Security. A security issue in the specification or implementation may come to
+  light whose resolution requires breaking compatibility. We reserve the right
+  to address such security issues.
+- Unspecified behavior. There are some aspects of the Protocol Buffers
+  specification that are undefined. Programs that depend on such unspecified
+  behavior may break in future releases.
+- Specification errors or changes. If it becomes necessary to address an
+  inconsistency, incompleteness, or change in the Protocol Buffers
+  specification, resolving the issue could affect the meaning or legality of
+  existing programs. We reserve the right to address such issues, including
+  updating the implementations.
+- Bugs. If the library has a bug that violates the specification, a program
+  that depends on the buggy behavior may break if the bug is fixed. We reserve
+  the right to fix such bugs.
+- Adding methods or fields to generated structs. These may conflict with field
+  names that already exist in a schema, causing applications to break. When the
+  code generator encounters a field in the schema that would collide with a
+  generated field or method name, the code generator will append an underscore
+  to the generated field or method name.
+- Adding, removing, or changing methods or fields in generated structs that
+  start with `XXX`. These parts of the generated code are exported out of
+  necessity, but should not be considered part of the public API.
+- Adding, removing, or changing unexported symbols in generated code.
+
+Any breaking changes outside of these will be announced 6 months in advance to
+protobuf@googlegroups.com.
+
+You should, whenever possible, use generated code created by the `protoc-gen-go`
+tool built at the same commit as the `proto` package. The `proto` package
+declares package-level constants in the form `ProtoPackageIsVersionX`.
+Application code and generated code may depend on one of these constants to
+ensure that compilation will fail if the available version of the proto library
+is too old. Whenever we make a change to the generated code that requires newer
+library support, in the same commit we will increment the version number of the
+generated code and declare a new package-level constant whose name incorporates
+the latest version number. Removing a compatibility constant is considered a
+breaking change and would be subject to the announcement policy stated above.
+
 ## Plugins ##
 
 The `protoc-gen-go/generator` package exposes a plugin interface,
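The compatibility section added above refers to package-level `ProtoPackageIsVersionX` constants. As a rough sketch of the mechanism (the exact constant name depends on the generator release and is an assumption here, not part of this diff), generated .pb.go files reference such a constant so that building against an older proto runtime fails at compile time:

```go
package example

import "github.com/gogo/protobuf/proto"

// A generated file pins the minimum runtime it was built for; if the vendored
// proto package predates this constant, compilation fails instead of
// producing subtly incompatible code.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
```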
34  vendor/github.com/gogo/protobuf/Readme.md  generated vendored
@ -20,22 +20,25 @@ Keeping track of how up to date gogoprotobuf is relative to golang/protobuf is d
 
 These projects use gogoprotobuf:
 
-- <a href="http://godoc.org/github.com/coreos/etcd">etcd</a> - <a href="https://blog.gopheracademy.com/advent-2015/etcd-distributed-key-value-store-with-grpc-http2/">blog</a>
+- <a href="http://godoc.org/github.com/coreos/etcd">etcd</a> - <a href="https://blog.gopheracademy.com/advent-2015/etcd-distributed-key-value-store-with-grpc-http2/">blog</a> - <a href="https://github.com/coreos/etcd/blob/master/etcdserver/etcdserverpb/etcdserver.proto">sample proto file</a>
 - <a href="https://www.spacemonkey.com/">spacemonkey</a> - <a href="https://www.spacemonkey.com/blog/posts/go-space-monkey">blog</a>
-- <a href="http://bazil.org">bazil</a>
-- <a href="http://badoo.com">badoo</a>
-- <a href="https://github.com/mesos/mesos-go">mesos-go</a>
-- <a href="https://github.com/mozilla-services/heka">heka</a>
-- <a href="https://github.com/cockroachdb/cockroach">cockroachdb</a>
-- <a href="https://github.com/jbenet/go-ipfs">go-ipfs</a>
-- <a href="https://github.com/philhofer/rkive">rkive-go</a>
+- <a href="http://badoo.com">badoo</a> - <a href="https://github.com/badoo/lsd/blob/32061f501c5eca9c76c596d790b450501ba27b2f/proto/lsd.proto">sample proto file</a>
+- <a href="https://github.com/mesos/mesos-go">mesos-go</a> - <a href="https://github.com/mesos/mesos-go/blob/f9e5fb7c2f50ab5f23299f26b6b07c5d6afdd252/api/v0/mesosproto/authentication.proto">sample proto file</a>
+- <a href="https://github.com/mozilla-services/heka">heka</a> - <a href="https://github.com/mozilla-services/heka/commit/eb72fbf7d2d28249fbaf8d8dc6607f4eb6f03351">the switch from golang/protobuf to gogo/protobuf when it was still on code.google.com</a>
+- <a href="https://github.com/cockroachdb/cockroach">cockroachdb</a> - <a href="https://github.com/cockroachdb/cockroach/blob/651d54d393e391a30154e9117ab4b18d9ee6d845/roachpb/metadata.proto">sample proto file</a>
+- <a href="https://github.com/jbenet/go-ipfs">go-ipfs</a> - <a href="https://github.com/ipfs/go-ipfs/blob/2b6da0c024f28abeb16947fb452787196a6b56a2/merkledag/pb/merkledag.proto">sample proto file</a>
+- <a href="https://github.com/philhofer/rkive">rkive-go</a> - <a href="https://github.com/philhofer/rkive/blob/e5dd884d3ea07b341321073882ae28aa16dd11be/rpbc/riak_dt.proto">sample proto file</a>
 - <a href="https://www.dropbox.com">dropbox</a>
-- <a href="https://srclib.org/">srclib</a> - <a href="https://sourcegraph.com/sourcegraph.com/sourcegraph/srclib@97f54fed4f9a4bff0a28edf6eb7c0e013afc7bcd/.tree/graph/def.proto">sample proto file</a>
+- <a href="https://srclib.org/">srclib</a> - <a href="https://github.com/sourcegraph/srclib/blob/6538858f0c410cac5c63440317b8d009e889d3fb/graph/def.proto">sample proto file</a>
 - <a href="http://www.adyoulike.com/">adyoulike</a>
-- <a href="http://www.cloudfoundry.org/">cloudfoundry</a>
-- <a href="http://kubernetes.io/">kubernetes</a>
+- <a href="http://www.cloudfoundry.org/">cloudfoundry</a> - <a href="https://github.com/cloudfoundry/bbs/blob/d673710b8c4211037805129944ee4c5373d6588a/models/events.proto">sample proto file</a>
+- <a href="http://kubernetes.io/">kubernetes</a> - <a href="https://github.com/kubernetes/kubernetes/tree/88d8628137f94ee816aaa6606ae8cd045dee0bff/cmd/libs/go2idl">go2idl built on top of gogoprotobuf</a>
 - <a href="https://dgraph.io/">dgraph</a> - <a href="https://github.com/dgraph-io/dgraph/releases/tag/v0.4.3">release notes</a> - <a href="https://discuss.dgraph.io/t/gogoprotobuf-is-extremely-fast/639">benchmarks</a></a>
 - <a href="https://github.com/centrifugal/centrifugo">centrifugo</a> - <a href="https://forum.golangbridge.org/t/centrifugo-real-time-messaging-websocket-or-sockjs-server-v1-5-0-released/2861">release notes</a> - <a href="https://medium.com/@fzambia/centrifugo-protobuf-inside-json-outside-21d39bdabd68#.o3icmgjqd">blog</a>
+- <a href="https://github.com/docker/swarmkit">docker swarmkit</a> - <a href="https://github.com/docker/swarmkit/blob/63600e01af3b8da2a0ed1c9fa6e1ae4299d75edb/api/objects.proto">sample proto file</a>
+- <a href="https://nats.io/">nats.io</a> - <a href="https://github.com/nats-io/go-nats-streaming/blob/master/pb/protocol.proto">go-nats-streaming</a>
+- <a href="https://github.com/pingcap/tidb">tidb</a> - Communication between <a href="https://github.com/pingcap/tipb/blob/master/generate-go.sh#L4">tidb</a> and <a href="https://github.com/pingcap/kvproto/blob/master/generate_go.sh#L3">tikv</a>
+- <a href="https://github.com/AsynkronIT/protoactor-go">protoactor-go</a> - <a href="https://github.com/AsynkronIT/protoactor-go/blob/dev/protobuf/protoc-gen-protoactor/main.go">vanity command</a> that also generates actors from service definitions
 
 Please lets us know if you are using gogoprotobuf by posting on our <a href="https://groups.google.com/forum/#!topic/gogoprotobuf/Brw76BxmFpQ">GoogleGroup</a>.
 
@ -56,10 +59,10 @@ After that you can choose:
 
 ### Installation
 
-To install it, you must first have Go (at least version 1.3.3) installed (see [http://golang.org/doc/install](http://golang.org/doc/install)). Go 1.4.2, 1.5.4, 1.6.3 and 1.7 are continuously tested.
+To install it, you must first have Go (at least version 1.6.3) installed (see [http://golang.org/doc/install](http://golang.org/doc/install)). Go 1.7.1 and 1.8 are continuously tested.
 
 Next, install the standard protocol buffer implementation from [https://github.com/google/protobuf](https://github.com/google/protobuf).
-Most versions from 2.3.1 should not give any problems, but 2.5.0, 2.6.1 and 3 are continuously tested.
+Most versions from 2.3.1 should not give any problems, but 2.6.1, 3.0.2 and 3.2.0 are continuously tested.
 
 ### Speed
 
@ -106,11 +109,6 @@ Install protoc-gen-gogo:
 
 	go get github.com/gogo/protobuf/protoc-gen-gogo
 	go get github.com/gogo/protobuf/gogoproto
 
-## Proto3
-
-Proto3 is supported, but the new well known types are not supported yet.
-[See Proto3 Issue](https://github.com/gogo/protobuf/issues/57) for more details.
-
 ## GRPC
 
 It works the same as golang/protobuf, simply specify the plugin.
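The Speed and GRPC notes in the readme above refer to the extra methods emitted when messages are generated with the gogo marshaler, sizer and unmarshaler plugins. A hedged sketch of what calling code relies on; the function and parameter names are illustrative, not from this repository:

```go
package example

// roundTrip marshals src and unmarshals the bytes into dst. Both parameters
// stand in for message types generated with the marshaler/unmarshaler
// plugins, which emit these methods directly instead of using reflection.
func roundTrip(src interface{ Marshal() ([]byte, error) }, dst interface{ Unmarshal([]byte) error }) error {
	data, err := src.Marshal()
	if err != nil {
		return err
	}
	return dst.Unmarshal(data)
}
```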
1  vendor/github.com/gogo/protobuf/gogoproto/doc.go  generated vendored
@ -148,6 +148,7 @@ The enumprefix, getters and stringer extensions can be used to remove some of th
 - goproto_stringer, if false, the message is generated without the default string method, this is useful for rather using stringer, or allowing you to write your own string method.
 - goproto_extensions_map (beta), if false, the extensions field is generated as type []byte instead of type map[int32]proto.Extension
 - goproto_unrecognized (beta), if false, XXX_unrecognized field is not generated. This is useful in conjunction with gogoproto.nullable=false, to generate structures completely devoid of pointers and reduce GC pressure at the cost of losing information about unrecognized fields.
+- goproto_registration (beta), if true, the generated files will register all messages and types against both gogo/protobuf and golang/protobuf. This is necessary when using third-party packages which read registrations from golang/protobuf (such as the grpc-gateway).
 
 Less Typing and Peace of Mind is explained in their specific plugin folders godoc:
 
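The `goproto_registration` option documented above controls whether gogo-generated types are also registered with the golang/protobuf registry. A small sketch of why that matters for packages that resolve types by name, such as the grpc-gateway; the message name is hypothetical:

```go
package example

import (
	golang_proto "github.com/golang/protobuf/proto"
)

// Lookup by fully-qualified name goes through the golang/protobuf registry;
// it only finds gogo-generated types when goproto_registration is enabled.
func registered() bool {
	return golang_proto.MessageType("example.MyMessage") != nil // hypothetical name
}
```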
279  vendor/github.com/gogo/protobuf/gogoproto/gogo.pb.go  generated vendored
@ -34,6 +34,7 @@ var E_GoprotoEnumPrefix = &proto.ExtensionDesc{
 	Field: 62001,
 	Name: "gogoproto.goproto_enum_prefix",
 	Tag: "varint,62001,opt,name=goproto_enum_prefix,json=goprotoEnumPrefix",
+	Filename: "gogo.proto",
 }
 
 var E_GoprotoEnumStringer = &proto.ExtensionDesc{
@ -42,6 +43,7 @@ var E_GoprotoEnumStringer = &proto.ExtensionDesc{
 	Field: 62021,
 	Name: "gogoproto.goproto_enum_stringer",
 	Tag: "varint,62021,opt,name=goproto_enum_stringer,json=goprotoEnumStringer",
+	Filename: "gogo.proto",
 }
 
 var E_EnumStringer = &proto.ExtensionDesc{
@ -50,6 +52,7 @@ var E_EnumStringer = &proto.ExtensionDesc{
 	Field: 62022,
 	Name: "gogoproto.enum_stringer",
 	Tag: "varint,62022,opt,name=enum_stringer,json=enumStringer",
+	Filename: "gogo.proto",
 }
 
 var E_EnumCustomname = &proto.ExtensionDesc{
@ -58,6 +61,16 @@ var E_EnumCustomname = &proto.ExtensionDesc{
 	Field: 62023,
 	Name: "gogoproto.enum_customname",
 	Tag: "bytes,62023,opt,name=enum_customname,json=enumCustomname",
+	Filename: "gogo.proto",
+}
+
+var E_Enumdecl = &proto.ExtensionDesc{
+	ExtendedType: (*google_protobuf.EnumOptions)(nil),
+	ExtensionType: (*bool)(nil),
+	Field: 62024,
+	Name: "gogoproto.enumdecl",
+	Tag: "varint,62024,opt,name=enumdecl",
+	Filename: "gogo.proto",
 }
 
 var E_EnumvalueCustomname = &proto.ExtensionDesc{
@ -66,6 +79,7 @@ var E_EnumvalueCustomname = &proto.ExtensionDesc{
 	Field: 66001,
 	Name: "gogoproto.enumvalue_customname",
 	Tag: "bytes,66001,opt,name=enumvalue_customname,json=enumvalueCustomname",
+	Filename: "gogo.proto",
 }
 
 var E_GoprotoGettersAll = &proto.ExtensionDesc{
@ -74,6 +88,7 @@ var E_GoprotoGettersAll = &proto.ExtensionDesc{
 	Field: 63001,
 	Name: "gogoproto.goproto_getters_all",
 	Tag: "varint,63001,opt,name=goproto_getters_all,json=goprotoGettersAll",
+	Filename: "gogo.proto",
 }
 
 var E_GoprotoEnumPrefixAll = &proto.ExtensionDesc{
@ -82,6 +97,7 @@ var E_GoprotoEnumPrefixAll = &proto.ExtensionDesc{
 	Field: 63002,
 	Name: "gogoproto.goproto_enum_prefix_all",
 	Tag: "varint,63002,opt,name=goproto_enum_prefix_all,json=goprotoEnumPrefixAll",
+	Filename: "gogo.proto",
 }
 
 var E_GoprotoStringerAll = &proto.ExtensionDesc{
@ -90,6 +106,7 @@ var E_GoprotoStringerAll = &proto.ExtensionDesc{
 	Field: 63003,
 	Name: "gogoproto.goproto_stringer_all",
 	Tag: "varint,63003,opt,name=goproto_stringer_all,json=goprotoStringerAll",
+	Filename: "gogo.proto",
 }
 
 var E_VerboseEqualAll = &proto.ExtensionDesc{
@ -98,6 +115,7 @@ var E_VerboseEqualAll = &proto.ExtensionDesc{
 	Field: 63004,
 	Name: "gogoproto.verbose_equal_all",
 	Tag: "varint,63004,opt,name=verbose_equal_all,json=verboseEqualAll",
+	Filename: "gogo.proto",
 }
 
 var E_FaceAll = &proto.ExtensionDesc{
@ -106,6 +124,7 @@ var E_FaceAll = &proto.ExtensionDesc{
 	Field: 63005,
 	Name: "gogoproto.face_all",
 	Tag: "varint,63005,opt,name=face_all,json=faceAll",
+	Filename: "gogo.proto",
 }
 
 var E_GostringAll = &proto.ExtensionDesc{
@ -114,6 +133,7 @@ var E_GostringAll = &proto.ExtensionDesc{
 	Field: 63006,
 	Name: "gogoproto.gostring_all",
 	Tag: "varint,63006,opt,name=gostring_all,json=gostringAll",
+	Filename: "gogo.proto",
 }
 
 var E_PopulateAll = &proto.ExtensionDesc{
@ -122,6 +142,7 @@ var E_PopulateAll = &proto.ExtensionDesc{
 	Field: 63007,
 	Name: "gogoproto.populate_all",
 	Tag: "varint,63007,opt,name=populate_all,json=populateAll",
+	Filename: "gogo.proto",
 }
 
 var E_StringerAll = &proto.ExtensionDesc{
@ -130,6 +151,7 @@ var E_StringerAll = &proto.ExtensionDesc{
 	Field: 63008,
 	Name: "gogoproto.stringer_all",
 	Tag: "varint,63008,opt,name=stringer_all,json=stringerAll",
+	Filename: "gogo.proto",
 }
 
 var E_OnlyoneAll = &proto.ExtensionDesc{
@ -138,6 +160,7 @@ var E_OnlyoneAll = &proto.ExtensionDesc{
 	Field: 63009,
 	Name: "gogoproto.onlyone_all",
 	Tag: "varint,63009,opt,name=onlyone_all,json=onlyoneAll",
+	Filename: "gogo.proto",
 }
 
 var E_EqualAll = &proto.ExtensionDesc{
@ -146,6 +169,7 @@ var E_EqualAll = &proto.ExtensionDesc{
 	Field: 63013,
 	Name: "gogoproto.equal_all",
 	Tag: "varint,63013,opt,name=equal_all,json=equalAll",
+	Filename: "gogo.proto",
 }
 
 var E_DescriptionAll = &proto.ExtensionDesc{
@ -154,6 +178,7 @@ var E_DescriptionAll = &proto.ExtensionDesc{
 	Field: 63014,
 	Name: "gogoproto.description_all",
 	Tag: "varint,63014,opt,name=description_all,json=descriptionAll",
+	Filename: "gogo.proto",
 }
 
 var E_TestgenAll = &proto.ExtensionDesc{
@ -162,6 +187,7 @@ var E_TestgenAll = &proto.ExtensionDesc{
 	Field: 63015,
 	Name: "gogoproto.testgen_all",
 	Tag: "varint,63015,opt,name=testgen_all,json=testgenAll",
+	Filename: "gogo.proto",
 }
 
 var E_BenchgenAll = &proto.ExtensionDesc{
@ -170,6 +196,7 @@ var E_BenchgenAll = &proto.ExtensionDesc{
 	Field: 63016,
 	Name: "gogoproto.benchgen_all",
 	Tag: "varint,63016,opt,name=benchgen_all,json=benchgenAll",
+	Filename: "gogo.proto",
 }
 
 var E_MarshalerAll = &proto.ExtensionDesc{
@ -178,6 +205,7 @@ var E_MarshalerAll = &proto.ExtensionDesc{
 	Field: 63017,
 	Name: "gogoproto.marshaler_all",
 	Tag: "varint,63017,opt,name=marshaler_all,json=marshalerAll",
+	Filename: "gogo.proto",
 }
 
 var E_UnmarshalerAll = &proto.ExtensionDesc{
@ -186,6 +214,7 @@ var E_UnmarshalerAll = &proto.ExtensionDesc{
 	Field: 63018,
 	Name: "gogoproto.unmarshaler_all",
 	Tag: "varint,63018,opt,name=unmarshaler_all,json=unmarshalerAll",
+	Filename: "gogo.proto",
 }
 
 var E_StableMarshalerAll = &proto.ExtensionDesc{
@ -194,6 +223,7 @@ var E_StableMarshalerAll = &proto.ExtensionDesc{
 	Field: 63019,
 	Name: "gogoproto.stable_marshaler_all",
 	Tag: "varint,63019,opt,name=stable_marshaler_all,json=stableMarshalerAll",
+	Filename: "gogo.proto",
 }
 
 var E_SizerAll = &proto.ExtensionDesc{
@ -202,6 +232,7 @@ var E_SizerAll = &proto.ExtensionDesc{
 	Field: 63020,
 	Name: "gogoproto.sizer_all",
 	Tag: "varint,63020,opt,name=sizer_all,json=sizerAll",
+	Filename: "gogo.proto",
 }
 
 var E_GoprotoEnumStringerAll = &proto.ExtensionDesc{
@ -210,6 +241,7 @@ var E_GoprotoEnumStringerAll = &proto.ExtensionDesc{
 	Field: 63021,
 	Name: "gogoproto.goproto_enum_stringer_all",
 	Tag: "varint,63021,opt,name=goproto_enum_stringer_all,json=goprotoEnumStringerAll",
+	Filename: "gogo.proto",
 }
 
 var E_EnumStringerAll = &proto.ExtensionDesc{
@ -218,6 +250,7 @@ var E_EnumStringerAll = &proto.ExtensionDesc{
 	Field: 63022,
 	Name: "gogoproto.enum_stringer_all",
 	Tag: "varint,63022,opt,name=enum_stringer_all,json=enumStringerAll",
+	Filename: "gogo.proto",
 }
 
 var E_UnsafeMarshalerAll = &proto.ExtensionDesc{
@ -226,6 +259,7 @@ var E_UnsafeMarshalerAll = &proto.ExtensionDesc{
 	Field: 63023,
 	Name: "gogoproto.unsafe_marshaler_all",
 	Tag: "varint,63023,opt,name=unsafe_marshaler_all,json=unsafeMarshalerAll",
+	Filename: "gogo.proto",
 }
 
 var E_UnsafeUnmarshalerAll = &proto.ExtensionDesc{
@ -234,6 +268,7 @@ var E_UnsafeUnmarshalerAll = &proto.ExtensionDesc{
 	Field: 63024,
 	Name: "gogoproto.unsafe_unmarshaler_all",
 	Tag: "varint,63024,opt,name=unsafe_unmarshaler_all,json=unsafeUnmarshalerAll",
+	Filename: "gogo.proto",
 }
 
 var E_GoprotoExtensionsMapAll = &proto.ExtensionDesc{
@ -242,6 +277,7 @@ var E_GoprotoExtensionsMapAll = &proto.ExtensionDesc{
 	Field: 63025,
 	Name: "gogoproto.goproto_extensions_map_all",
 	Tag: "varint,63025,opt,name=goproto_extensions_map_all,json=goprotoExtensionsMapAll",
+	Filename: "gogo.proto",
 }
 
 var E_GoprotoUnrecognizedAll = &proto.ExtensionDesc{
@ -250,6 +286,7 @@ var E_GoprotoUnrecognizedAll = &proto.ExtensionDesc{
 	Field: 63026,
 	Name: "gogoproto.goproto_unrecognized_all",
 	Tag: "varint,63026,opt,name=goproto_unrecognized_all,json=goprotoUnrecognizedAll",
+	Filename: "gogo.proto",
 }
 
 var E_GogoprotoImport = &proto.ExtensionDesc{
@ -258,6 +295,7 @@ var E_GogoprotoImport = &proto.ExtensionDesc{
 	Field: 63027,
 	Name: "gogoproto.gogoproto_import",
 	Tag: "varint,63027,opt,name=gogoproto_import,json=gogoprotoImport",
+	Filename: "gogo.proto",
 }
 
 var E_ProtosizerAll = &proto.ExtensionDesc{
@ -266,6 +304,7 @@ var E_ProtosizerAll = &proto.ExtensionDesc{
 	Field: 63028,
 	Name: "gogoproto.protosizer_all",
 	Tag: "varint,63028,opt,name=protosizer_all,json=protosizerAll",
+	Filename: "gogo.proto",
 }
 
 var E_CompareAll = &proto.ExtensionDesc{
@ -274,6 +313,34 @@ var E_CompareAll = &proto.ExtensionDesc{
 	Field: 63029,
 	Name: "gogoproto.compare_all",
 	Tag: "varint,63029,opt,name=compare_all,json=compareAll",
+	Filename: "gogo.proto",
+}
+
+var E_TypedeclAll = &proto.ExtensionDesc{
+	ExtendedType: (*google_protobuf.FileOptions)(nil),
+	ExtensionType: (*bool)(nil),
+	Field: 63030,
+	Name: "gogoproto.typedecl_all",
+	Tag: "varint,63030,opt,name=typedecl_all,json=typedeclAll",
+	Filename: "gogo.proto",
+}
+
+var E_EnumdeclAll = &proto.ExtensionDesc{
+	ExtendedType: (*google_protobuf.FileOptions)(nil),
+	ExtensionType: (*bool)(nil),
+	Field: 63031,
+	Name: "gogoproto.enumdecl_all",
+	Tag: "varint,63031,opt,name=enumdecl_all,json=enumdeclAll",
+	Filename: "gogo.proto",
+}
+
+var E_GoprotoRegistration = &proto.ExtensionDesc{
+	ExtendedType: (*google_protobuf.FileOptions)(nil),
+	ExtensionType: (*bool)(nil),
+	Field: 63032,
+	Name: "gogoproto.goproto_registration",
+	Tag: "varint,63032,opt,name=goproto_registration,json=goprotoRegistration",
+	Filename: "gogo.proto",
 }
 
 var E_GoprotoGetters = &proto.ExtensionDesc{
@ -282,6 +349,7 @@ var E_GoprotoGetters = &proto.ExtensionDesc{
 	Field: 64001,
 	Name: "gogoproto.goproto_getters",
 	Tag: "varint,64001,opt,name=goproto_getters,json=goprotoGetters",
+	Filename: "gogo.proto",
 }
 
 var E_GoprotoStringer = &proto.ExtensionDesc{
@ -290,6 +358,7 @@ var E_GoprotoStringer = &proto.ExtensionDesc{
 	Field: 64003,
 	Name: "gogoproto.goproto_stringer",
 	Tag: "varint,64003,opt,name=goproto_stringer,json=goprotoStringer",
+	Filename: "gogo.proto",
 }
 
 var E_VerboseEqual = &proto.ExtensionDesc{
@ -298,6 +367,7 @@ var E_VerboseEqual = &proto.ExtensionDesc{
 	Field: 64004,
 	Name: "gogoproto.verbose_equal",
 	Tag: "varint,64004,opt,name=verbose_equal,json=verboseEqual",
+	Filename: "gogo.proto",
 }
 
 var E_Face = &proto.ExtensionDesc{
@ -306,6 +376,7 @@ var E_Face = &proto.ExtensionDesc{
 	Field: 64005,
 	Name: "gogoproto.face",
 	Tag: "varint,64005,opt,name=face",
+	Filename: "gogo.proto",
 }
 
 var E_Gostring = &proto.ExtensionDesc{
@ -314,6 +385,7 @@ var E_Gostring = &proto.ExtensionDesc{
 	Field: 64006,
 	Name: "gogoproto.gostring",
 	Tag: "varint,64006,opt,name=gostring",
+	Filename: "gogo.proto",
 }
 
 var E_Populate = &proto.ExtensionDesc{
@ -322,6 +394,7 @@ var E_Populate = &proto.ExtensionDesc{
 	Field: 64007,
 	Name: "gogoproto.populate",
 	Tag: "varint,64007,opt,name=populate",
+	Filename: "gogo.proto",
 }
 
 var E_Stringer = &proto.ExtensionDesc{
@ -330,6 +403,7 @@ var E_Stringer = &proto.ExtensionDesc{
 	Field: 67008,
 	Name: "gogoproto.stringer",
 	Tag: "varint,67008,opt,name=stringer",
+	Filename: "gogo.proto",
 }
 
 var E_Onlyone = &proto.ExtensionDesc{
@ -338,6 +412,7 @@ var E_Onlyone = &proto.ExtensionDesc{
 	Field: 64009,
 	Name: "gogoproto.onlyone",
 	Tag: "varint,64009,opt,name=onlyone",
+	Filename: "gogo.proto",
 }
 
 var E_Equal = &proto.ExtensionDesc{
@ -346,6 +421,7 @@ var E_Equal = &proto.ExtensionDesc{
 	Field: 64013,
 	Name: "gogoproto.equal",
 	Tag: "varint,64013,opt,name=equal",
+	Filename: "gogo.proto",
 }
 
 var E_Description = &proto.ExtensionDesc{
@ -354,6 +430,7 @@ var E_Description = &proto.ExtensionDesc{
 	Field: 64014,
 	Name: "gogoproto.description",
 	Tag: "varint,64014,opt,name=description",
+	Filename: "gogo.proto",
 }
 
 var E_Testgen = &proto.ExtensionDesc{
@ -362,6 +439,7 @@ var E_Testgen = &proto.ExtensionDesc{
 	Field: 64015,
 	Name: "gogoproto.testgen",
 	Tag: "varint,64015,opt,name=testgen",
+	Filename: "gogo.proto",
 }
 
 var E_Benchgen = &proto.ExtensionDesc{
@ -370,6 +448,7 @@ var E_Benchgen = &proto.ExtensionDesc{
 	Field: 64016,
 	Name: "gogoproto.benchgen",
 	Tag: "varint,64016,opt,name=benchgen",
+	Filename: "gogo.proto",
 }
 
 var E_Marshaler = &proto.ExtensionDesc{
@ -378,6 +457,7 @@ var E_Marshaler = &proto.ExtensionDesc{
 	Field: 64017,
 	Name: "gogoproto.marshaler",
 	Tag: "varint,64017,opt,name=marshaler",
+	Filename: "gogo.proto",
 }
 
 var E_Unmarshaler = &proto.ExtensionDesc{
@ -386,6 +466,7 @@ var E_Unmarshaler = &proto.ExtensionDesc{
 	Field: 64018,
 	Name: "gogoproto.unmarshaler",
 	Tag: "varint,64018,opt,name=unmarshaler",
+	Filename: "gogo.proto",
 }
 
 var E_StableMarshaler = &proto.ExtensionDesc{
@ -394,6 +475,7 @@ var E_StableMarshaler = &proto.ExtensionDesc{
 	Field: 64019,
 	Name: "gogoproto.stable_marshaler",
 	Tag: "varint,64019,opt,name=stable_marshaler,json=stableMarshaler",
+	Filename: "gogo.proto",
 }
 
 var E_Sizer = &proto.ExtensionDesc{
@ -402,6 +484,7 @@ var E_Sizer = &proto.ExtensionDesc{
 	Field: 64020,
 	Name: "gogoproto.sizer",
 	Tag: "varint,64020,opt,name=sizer",
+	Filename: "gogo.proto",
 }
 
 var E_UnsafeMarshaler = &proto.ExtensionDesc{
@ -410,6 +493,7 @@ var E_UnsafeMarshaler = &proto.ExtensionDesc{
 	Field: 64023,
 	Name: "gogoproto.unsafe_marshaler",
 	Tag: "varint,64023,opt,name=unsafe_marshaler,json=unsafeMarshaler",
+	Filename: "gogo.proto",
 }
 
 var E_UnsafeUnmarshaler = &proto.ExtensionDesc{
@ -418,6 +502,7 @@ var E_UnsafeUnmarshaler = &proto.ExtensionDesc{
 	Field: 64024,
 	Name: "gogoproto.unsafe_unmarshaler",
 	Tag: "varint,64024,opt,name=unsafe_unmarshaler,json=unsafeUnmarshaler",
+	Filename: "gogo.proto",
 }
 
 var E_GoprotoExtensionsMap = &proto.ExtensionDesc{
@ -426,6 +511,7 @@ var E_GoprotoExtensionsMap = &proto.ExtensionDesc{
 	Field: 64025,
 	Name: "gogoproto.goproto_extensions_map",
 	Tag: "varint,64025,opt,name=goproto_extensions_map,json=goprotoExtensionsMap",
+	Filename: "gogo.proto",
 }
 
 var E_GoprotoUnrecognized = &proto.ExtensionDesc{
@ -434,6 +520,7 @@ var E_GoprotoUnrecognized = &proto.ExtensionDesc{
 	Field: 64026,
 	Name: "gogoproto.goproto_unrecognized",
 	Tag: "varint,64026,opt,name=goproto_unrecognized,json=goprotoUnrecognized",
+	Filename: "gogo.proto",
 }
 
 var E_Protosizer = &proto.ExtensionDesc{
@ -442,6 +529,7 @@ var E_Protosizer = &proto.ExtensionDesc{
 	Field: 64028,
 	Name: "gogoproto.protosizer",
 	Tag: "varint,64028,opt,name=protosizer",
+	Filename: "gogo.proto",
 }
 
 var E_Compare = &proto.ExtensionDesc{
@ -450,6 +538,16 @@ var E_Compare = &proto.ExtensionDesc{
 	Field: 64029,
 	Name: "gogoproto.compare",
 	Tag: "varint,64029,opt,name=compare",
+	Filename: "gogo.proto",
+}
+
+var E_Typedecl = &proto.ExtensionDesc{
+	ExtendedType: (*google_protobuf.MessageOptions)(nil),
+	ExtensionType: (*bool)(nil),
+	Field: 64030,
+	Name: "gogoproto.typedecl",
+	Tag: "varint,64030,opt,name=typedecl",
+	Filename: "gogo.proto",
 }
 
 var E_Nullable = &proto.ExtensionDesc{
@ -458,6 +556,7 @@ var E_Nullable = &proto.ExtensionDesc{
 	Field: 65001,
 	Name: "gogoproto.nullable",
 	Tag: "varint,65001,opt,name=nullable",
+	Filename: "gogo.proto",
 }
 
 var E_Embed = &proto.ExtensionDesc{
@ -466,6 +565,7 @@ var E_Embed = &proto.ExtensionDesc{
 	Field: 65002,
 	Name: "gogoproto.embed",
 	Tag: "varint,65002,opt,name=embed",
+	Filename: "gogo.proto",
 }
 
 var E_Customtype = &proto.ExtensionDesc{
@ -474,6 +574,7 @@ var E_Customtype = &proto.ExtensionDesc{
 	Field: 65003,
 	Name: "gogoproto.customtype",
 	Tag: "bytes,65003,opt,name=customtype",
+	Filename: "gogo.proto",
 }
 
 var E_Customname = &proto.ExtensionDesc{
@ -482,6 +583,7 @@ var E_Customname = &proto.ExtensionDesc{
 	Field: 65004,
 	Name: "gogoproto.customname",
 	Tag: "bytes,65004,opt,name=customname",
+	Filename: "gogo.proto",
 }
 
 var E_Jsontag = &proto.ExtensionDesc{
@ -490,6 +592,7 @@ var E_Jsontag = &proto.ExtensionDesc{
 	Field: 65005,
 	Name: "gogoproto.jsontag",
 	Tag: "bytes,65005,opt,name=jsontag",
+	Filename: "gogo.proto",
 }
 
 var E_Moretags = &proto.ExtensionDesc{
@ -498,6 +601,7 @@ var E_Moretags = &proto.ExtensionDesc{
 	Field: 65006,
 	Name: "gogoproto.moretags",
 	Tag: "bytes,65006,opt,name=moretags",
+	Filename: "gogo.proto",
 }
 
 var E_Casttype = &proto.ExtensionDesc{
@ -506,6 +610,7 @@ var E_Casttype = &proto.ExtensionDesc{
 	Field: 65007,
 	Name: "gogoproto.casttype",
 	Tag: "bytes,65007,opt,name=casttype",
+	Filename: "gogo.proto",
 }
 
 var E_Castkey = &proto.ExtensionDesc{
@ -514,6 +619,7 @@ var E_Castkey = &proto.ExtensionDesc{
 	Field: 65008,
 	Name: "gogoproto.castkey",
 	Tag: "bytes,65008,opt,name=castkey",
+	Filename: "gogo.proto",
 }
 
 var E_Castvalue = &proto.ExtensionDesc{
@ -522,6 +628,25 @@ var E_Castvalue = &proto.ExtensionDesc{
 	Field: 65009,
 	Name: "gogoproto.castvalue",
 	Tag: "bytes,65009,opt,name=castvalue",
+	Filename: "gogo.proto",
+}
+
+var E_Stdtime = &proto.ExtensionDesc{
+	ExtendedType: (*google_protobuf.FieldOptions)(nil),
+	ExtensionType: (*bool)(nil),
+	Field: 65010,
+	Name: "gogoproto.stdtime",
+	Tag: "varint,65010,opt,name=stdtime",
+	Filename: "gogo.proto",
+}
+
+var E_Stdduration = &proto.ExtensionDesc{
+	ExtendedType: (*google_protobuf.FieldOptions)(nil),
+	ExtensionType: (*bool)(nil),
+	Field: 65011,
+	Name: "gogoproto.stdduration",
+	Tag: "varint,65011,opt,name=stdduration",
+	Filename: "gogo.proto",
 }
 
 func init() {
@ -529,6 +654,7 @@ func init() {
 	proto.RegisterExtension(E_GoprotoEnumStringer)
 	proto.RegisterExtension(E_EnumStringer)
 	proto.RegisterExtension(E_EnumCustomname)
+	proto.RegisterExtension(E_Enumdecl)
 	proto.RegisterExtension(E_EnumvalueCustomname)
 	proto.RegisterExtension(E_GoprotoGettersAll)
 	proto.RegisterExtension(E_GoprotoEnumPrefixAll)
@ -556,6 +682,9 @@ func init() {
 	proto.RegisterExtension(E_GogoprotoImport)
 	proto.RegisterExtension(E_ProtosizerAll)
 	proto.RegisterExtension(E_CompareAll)
+	proto.RegisterExtension(E_TypedeclAll)
+	proto.RegisterExtension(E_EnumdeclAll)
+	proto.RegisterExtension(E_GoprotoRegistration)
 	proto.RegisterExtension(E_GoprotoGetters)
 	proto.RegisterExtension(E_GoprotoStringer)
 	proto.RegisterExtension(E_VerboseEqual)
@ -578,6 +707,7 @@ func init() {
 	proto.RegisterExtension(E_GoprotoUnrecognized)
 	proto.RegisterExtension(E_Protosizer)
 	proto.RegisterExtension(E_Compare)
+	proto.RegisterExtension(E_Typedecl)
 	proto.RegisterExtension(E_Nullable)
 	proto.RegisterExtension(E_Embed)
 	proto.RegisterExtension(E_Customtype)
@ -587,79 +717,88 @@ func init() {
 	proto.RegisterExtension(E_Casttype)
 	proto.RegisterExtension(E_Castkey)
 	proto.RegisterExtension(E_Castvalue)
+	proto.RegisterExtension(E_Stdtime)
+	proto.RegisterExtension(E_Stdduration)
 }
 
 func init() { proto.RegisterFile("gogo.proto", fileDescriptorGogo) }
 
 var fileDescriptorGogo = []byte{
-	// 1098 bytes of a gzipped FileDescriptorProto
+	// 1201 bytes of a gzipped FileDescriptorProto
	[hex dump of the old and new gzipped file descriptor bytes, truncated in this view]
0xcd, 0x67, 0xe1, 0xb6, 0xf2, 0xf7, 0x46, 0xbb, 0x17, 0xc6, 0x2a, 0x49, 0x2d, 0xc6, 0xef, 0xb8,
|
0x9c, 0xca, 0x96, 0xf2, 0xe3, 0xe0, 0x82, 0x6c, 0x3b, 0xa8, 0x3f, 0xeb, 0x29, 0xd5, 0x86, 0x81,
|
||||||
0x53, 0x25, 0x77, 0x36, 0xc7, 0xc4, 0x1a, 0x4c, 0xe7, 0x7f, 0xba, 0x3e, 0x92, 0xdf, 0x93, 0x68,
|
0xa3, 0xf9, 0x24, 0xdc, 0x50, 0x9e, 0x55, 0x1a, 0x41, 0x94, 0xa8, 0x54, 0x5b, 0x8c, 0x9f, 0x73,
|
||||||
0xaa, 0x4f, 0xd1, 0xe0, 0xe8, 0xaa, 0x30, 0xf6, 0x12, 0x97, 0xf9, 0xf7, 0x03, 0x0f, 0x0e, 0x42,
|
0xa5, 0x4a, 0xee, 0x64, 0x81, 0x89, 0x65, 0x98, 0x28, 0x3e, 0xba, 0x3e, 0x92, 0x5f, 0x90, 0x68,
|
||||||
0x8a, 0xa7, 0x6f, 0x66, 0x20, 0x89, 0x1b, 0xf7, 0x1c, 0x90, 0xac, 0x4b, 0xad, 0x3d, 0xbf, 0xf4,
|
0xbc, 0x4b, 0x51, 0xe3, 0x68, 0xa9, 0x28, 0xf1, 0x52, 0x97, 0xfe, 0xf7, 0x25, 0x37, 0x0e, 0x42,
|
||||||
0x3c, 0xbd, 0x47, 0x67, 0xb6, 0x1a, 0xc4, 0xe2, 0x01, 0x2c, 0x4f, 0x35, 0x2e, 0xed, 0xb2, 0x2b,
|
0xa8, 0x71, 0xe8, 0xad, 0x44, 0xe2, 0xb4, 0x77, 0x30, 0x7c, 0xc5, 0x8d, 0x83, 0x19, 0x52, 0xf0,
|
||||||
0x7b, 0x65, 0x85, 0x2a, 0x69, 0x29, 0xce, 0xc0, 0x54, 0x25, 0x2a, 0xed, 0xaa, 0x67, 0x48, 0x35,
|
0x81, 0xc1, 0x41, 0xf1, 0x35, 0x2b, 0x98, 0x41, 0xc5, 0x3d, 0xdd, 0x41, 0x9b, 0x4a, 0x3f, 0xc8,
|
||||||
0x69, 0x26, 0xa5, 0x38, 0x01, 0x63, 0x18, 0x7b, 0x76, 0xfc, 0x59, 0xc2, 0xf3, 0xe5, 0xe2, 0x14,
|
0x74, 0xea, 0xe1, 0x6a, 0x8b, 0xea, 0x9b, 0xed, 0xea, 0x21, 0x6c, 0xdd, 0x40, 0xc5, 0x29, 0x98,
|
||||||
0xd4, 0x38, 0xee, 0xec, 0xe8, 0x73, 0x84, 0x96, 0x08, 0xe2, 0x1c, 0x75, 0x76, 0xfc, 0x79, 0xc6,
|
0xec, 0x39, 0x62, 0xd4, 0x6f, 0xdb, 0x63, 0x5b, 0x95, 0x59, 0xe6, 0xf9, 0xa5, 0xf0, 0xd1, 0x1d,
|
||||||
0x19, 0x41, 0xdc, 0xbd, 0x84, 0x3f, 0xbe, 0x38, 0x46, 0xe3, 0x8a, 0x6b, 0xb7, 0x04, 0xe3, 0x94,
|
0x6a, 0x46, 0xd5, 0x13, 0x86, 0xb8, 0x13, 0xeb, 0x5e, 0x3d, 0x07, 0xd8, 0x65, 0x17, 0x77, 0xca,
|
||||||
0x71, 0x76, 0xfa, 0x05, 0xfa, 0x72, 0x26, 0xc4, 0x7d, 0x70, 0xc8, 0xb1, 0xe0, 0x2f, 0x11, 0x5a,
|
0xd2, 0x57, 0x8e, 0x01, 0xe2, 0x04, 0x8c, 0x57, 0xce, 0x00, 0x76, 0xd5, 0x63, 0xa4, 0x1a, 0x33,
|
||||||
0xac, 0x17, 0xab, 0x30, 0x61, 0xe4, 0x9a, 0x1d, 0x7f, 0x99, 0x70, 0x93, 0xc2, 0xad, 0x53, 0xae,
|
0x8f, 0x00, 0xe2, 0x08, 0x0c, 0xe1, 0x3c, 0xb7, 0xe3, 0x8f, 0x13, 0x5e, 0x2c, 0x17, 0xc7, 0x60,
|
||||||
0xd9, 0x05, 0xaf, 0xf0, 0xd6, 0x89, 0xc0, 0xb2, 0x71, 0xa4, 0xd9, 0xe9, 0x57, 0xb9, 0xea, 0x8c,
|
0x84, 0xe7, 0xb8, 0x1d, 0x7d, 0x82, 0xd0, 0x12, 0x41, 0x9c, 0x67, 0xb8, 0x1d, 0x7f, 0x92, 0x71,
|
||||||
0x88, 0x65, 0xa8, 0x97, 0x63, 0xca, 0xce, 0xbf, 0x46, 0x7c, 0x9f, 0xc1, 0x0a, 0x18, 0x63, 0xd2,
|
0x46, 0x10, 0x77, 0x4f, 0xe1, 0xb7, 0x4f, 0x0f, 0x51, 0x1f, 0xe6, 0xdc, 0xcd, 0xc3, 0x30, 0x0d,
|
||||||
0xae, 0x78, 0x9d, 0x2b, 0x60, 0x50, 0x78, 0x8c, 0x06, 0xa3, 0xcf, 0x6e, 0x7a, 0x83, 0x8f, 0xd1,
|
0x6f, 0x3b, 0xfd, 0x14, 0xfd, 0x38, 0x13, 0xe2, 0x0e, 0xd8, 0xe7, 0x98, 0xf0, 0x67, 0x08, 0xed,
|
||||||
0x40, 0xf2, 0x61, 0x37, 0xf3, 0x69, 0x61, 0x57, 0xbc, 0xc9, 0xdd, 0xcc, 0xd7, 0xe3, 0x36, 0x06,
|
0xac, 0x17, 0x4b, 0x30, 0x6a, 0x0c, 0x6c, 0x3b, 0xfe, 0x2c, 0xe1, 0x26, 0x85, 0xa1, 0xd3, 0xc0,
|
||||||
0xb3, 0xc4, 0xee, 0x78, 0x8b, 0xb7, 0x31, 0x10, 0x25, 0xa2, 0x05, 0x8d, 0x83, 0x39, 0x62, 0xf7,
|
0xb6, 0x0b, 0x9e, 0xe3, 0xd0, 0x89, 0xc0, 0xb4, 0xf1, 0xac, 0xb6, 0xd3, 0xcf, 0x73, 0xd6, 0x19,
|
||||||
0xbd, 0x4d, 0xbe, 0xd9, 0x03, 0x31, 0x22, 0x1e, 0x81, 0x85, 0xe1, 0x19, 0x62, 0xb7, 0x5e, 0xdd,
|
0x11, 0x0b, 0x50, 0x2b, 0xfb, 0xaf, 0x9d, 0x7f, 0x81, 0xf8, 0x2e, 0x83, 0x19, 0x30, 0xfa, 0xbf,
|
||||||
0x1b, 0xf8, 0xd5, 0x6f, 0x46, 0x88, 0x38, 0xd7, 0xff, 0xd5, 0x6f, 0xe6, 0x87, 0x5d, 0x7b, 0x6d,
|
0x5d, 0xf1, 0x22, 0x67, 0xc0, 0xa0, 0x70, 0x1b, 0xf5, 0xce, 0x74, 0xbb, 0xe9, 0x25, 0xde, 0x46,
|
||||||
0xaf, 0xfa, 0x62, 0x67, 0xc6, 0x87, 0x58, 0x01, 0xe8, 0x8f, 0x6e, 0xbb, 0xeb, 0x3a, 0xb9, 0x0c,
|
0x3d, 0x23, 0x1d, 0xab, 0x59, 0xb4, 0x41, 0xbb, 0xe2, 0x65, 0xae, 0x66, 0xb1, 0x1e, 0xc3, 0xe8,
|
||||||
0x08, 0x8f, 0x06, 0x4d, 0x6e, 0x3b, 0x7f, 0x83, 0x8f, 0x06, 0x11, 0x62, 0x09, 0x6a, 0x51, 0x16,
|
0x1d, 0x92, 0x76, 0xc7, 0x2b, 0x1c, 0x46, 0xcf, 0x8c, 0x14, 0x6b, 0x50, 0xdf, 0x3b, 0x20, 0xed,
|
||||||
0x04, 0xf8, 0x70, 0x34, 0xee, 0x1e, 0x12, 0x13, 0x32, 0xd8, 0x62, 0xf6, 0xf7, 0x7d, 0x3a, 0x18,
|
0xbe, 0x57, 0xc9, 0x37, 0xb5, 0x67, 0x3e, 0x8a, 0xfb, 0x60, 0xb6, 0xff, 0x70, 0xb4, 0x5b, 0x2f,
|
||||||
0x0c, 0x88, 0x13, 0x70, 0x48, 0x86, 0x1d, 0xb9, 0x65, 0x23, 0xff, 0xd8, 0xe7, 0x81, 0x80, 0xab,
|
0xed, 0xf4, 0xbc, 0xce, 0x98, 0xb3, 0x51, 0x9c, 0xee, 0x76, 0x59, 0x73, 0x30, 0xda, 0xb5, 0x97,
|
||||||
0xc5, 0x32, 0x40, 0xf1, 0xd2, 0x98, 0xee, 0xc4, 0xd6, 0x6f, 0xfd, 0x73, 0xbf, 0x78, 0x07, 0x35,
|
0x77, 0xaa, 0x8d, 0xd6, 0x9c, 0x8b, 0x62, 0x11, 0xa0, 0x3b, 0x93, 0xec, 0xae, 0x2b, 0xe4, 0x32,
|
||||||
0x90, 0xbe, 0x20, 0x7f, 0xeb, 0xb4, 0x08, 0x76, 0xab, 0x82, 0xfc, 0x45, 0xf3, 0x24, 0x8c, 0x3f,
|
0x20, 0xdc, 0x1a, 0x34, 0x92, 0xec, 0xfc, 0x55, 0xde, 0x1a, 0x44, 0xe0, 0xd6, 0xe0, 0x69, 0x64,
|
||||||
0xa1, 0x55, 0x94, 0x7a, 0xbe, 0x8d, 0xfe, 0x8b, 0x68, 0x5e, 0x8f, 0x05, 0x0b, 0x55, 0x22, 0x53,
|
0xa7, 0xaf, 0xf1, 0xd6, 0x60, 0x44, 0xcc, 0xc3, 0x48, 0x9c, 0x87, 0x21, 0x3e, 0x5b, 0xf5, 0x5b,
|
||||||
0xcf, 0xd7, 0x36, 0xf6, 0x6f, 0x62, 0x4b, 0x00, 0xe1, 0xae, 0xa7, 0x53, 0x97, 0xfb, 0xfe, 0x87,
|
0xfb, 0x8c, 0x1b, 0x19, 0xb6, 0x19, 0xfe, 0x65, 0x97, 0x60, 0x06, 0xc4, 0x11, 0xd8, 0x27, 0xa3,
|
||||||
0x61, 0x06, 0x70, 0xd3, 0x78, 0xfd, 0xa4, 0xdc, 0xb1, 0xb1, 0xff, 0xf2, 0xa6, 0x69, 0xbd, 0x38,
|
0xa6, 0x6c, 0xdb, 0xc8, 0x5f, 0x77, 0xb9, 0x9f, 0xe0, 0x6a, 0xb1, 0x00, 0xd0, 0x79, 0x99, 0xc6,
|
||||||
0x05, 0x75, 0xbc, 0xcc, 0xdf, 0xb7, 0x6d, 0xf0, 0x7f, 0x04, 0xf7, 0x89, 0xd3, 0xc7, 0x60, 0xae,
|
0x28, 0x6c, 0xec, 0x6f, 0xbb, 0x9d, 0xf7, 0x7a, 0x03, 0xe9, 0x0a, 0x8a, 0xb7, 0x71, 0x8b, 0x60,
|
||||||
0xab, 0xc2, 0x41, 0xec, 0x34, 0x34, 0x55, 0x53, 0xb5, 0xf2, 0x07, 0xf1, 0xff, 0x00, 0x00, 0x00,
|
0xbb, 0x2a, 0x28, 0x5e, 0xc0, 0x8f, 0xc2, 0xf0, 0x43, 0x99, 0x8a, 0xb5, 0xe7, 0xdb, 0xe8, 0xdf,
|
||||||
0xff, 0xff, 0x87, 0x5c, 0xee, 0x2b, 0x7e, 0x11, 0x00, 0x00,
|
0x89, 0xe6, 0xf5, 0x98, 0xb0, 0x48, 0xa5, 0x52, 0x7b, 0x7e, 0x66, 0x63, 0xff, 0x20, 0xb6, 0x04,
|
||||||
|
0x10, 0x6e, 0x79, 0x99, 0x76, 0xb9, 0xef, 0x3f, 0x19, 0x66, 0x00, 0x83, 0xc6, 0xeb, 0x87, 0xe5,
|
||||||
|
0x96, 0x8d, 0xfd, 0x8b, 0x83, 0xa6, 0xf5, 0xe2, 0x18, 0xd4, 0xf0, 0xb2, 0xf8, 0x3f, 0x84, 0x0d,
|
||||||
|
0xfe, 0x9b, 0xe0, 0x2e, 0x81, 0xbf, 0x9c, 0xe9, 0xb6, 0x0e, 0xec, 0xc9, 0xfe, 0x87, 0x2a, 0xcd,
|
||||||
|
0xeb, 0xc5, 0x22, 0x8c, 0x66, 0xba, 0xdd, 0xce, 0xe9, 0x44, 0x63, 0xc1, 0xff, 0xdd, 0x2d, 0x5f,
|
||||||
|
0x72, 0x4b, 0xe6, 0xf8, 0x21, 0x98, 0x6e, 0xa9, 0xa8, 0x17, 0x3c, 0x0e, 0x2b, 0x6a, 0x45, 0xad,
|
||||||
|
0x15, 0xbb, 0xe8, 0xbf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x0a, 0x9c, 0xec, 0xd8, 0x50, 0x13, 0x00,
|
||||||
|
0x00,
|
||||||
}
|
}
|
||||||
|
|

10  vendor/github.com/gogo/protobuf/gogoproto/gogo.proto  generated vendored

@@ -39,6 +39,7 @@ extend google.protobuf.EnumOptions {
     optional bool goproto_enum_stringer = 62021;
     optional bool enum_stringer = 62022;
     optional string enum_customname = 62023;
+    optional bool enumdecl = 62024;
 }
 
 extend google.protobuf.EnumValueOptions {
@@ -77,6 +78,10 @@ extend google.protobuf.FileOptions {
     optional bool gogoproto_import = 63027;
     optional bool protosizer_all = 63028;
     optional bool compare_all = 63029;
+    optional bool typedecl_all = 63030;
+    optional bool enumdecl_all = 63031;
+
+    optional bool goproto_registration = 63032;
 }
 
 extend google.protobuf.MessageOptions {
@@ -107,6 +112,8 @@ extend google.protobuf.MessageOptions {
 
     optional bool protosizer = 64028;
     optional bool compare = 64029;
+
+    optional bool typedecl = 64030;
 }
 
 extend google.protobuf.FieldOptions {
@@ -119,4 +126,7 @@ extend google.protobuf.FieldOptions {
     optional string casttype = 65007;
     optional string castkey = 65008;
     optional string castvalue = 65009;
+
+    optional bool stdtime = 65010;
+    optional bool stdduration = 65011;
 }

47  vendor/github.com/gogo/protobuf/gogoproto/helper.go  generated vendored

@@ -39,6 +39,14 @@ func IsNullable(field *google_protobuf.FieldDescriptorProto) bool {
     return proto.GetBoolExtension(field.Options, E_Nullable, true)
 }
 
+func IsStdTime(field *google_protobuf.FieldDescriptorProto) bool {
+    return proto.GetBoolExtension(field.Options, E_Stdtime, false)
+}
+
+func IsStdDuration(field *google_protobuf.FieldDescriptorProto) bool {
+    return proto.GetBoolExtension(field.Options, E_Stdduration, false)
+}
+
 func NeedsNilCheck(proto3 bool, field *google_protobuf.FieldDescriptorProto) bool {
     nullable := IsNullable(field)
     if field.IsMessage() || IsCustomType(field) {
@@ -82,7 +90,18 @@ func IsCastValue(field *google_protobuf.FieldDescriptorProto) bool {
     return false
 }
 
+func HasEnumDecl(file *google_protobuf.FileDescriptorProto, enum *google_protobuf.EnumDescriptorProto) bool {
+    return proto.GetBoolExtension(enum.Options, E_Enumdecl, proto.GetBoolExtension(file.Options, E_EnumdeclAll, true))
+}
+
+func HasTypeDecl(file *google_protobuf.FileDescriptorProto, message *google_protobuf.DescriptorProto) bool {
+    return proto.GetBoolExtension(message.Options, E_Typedecl, proto.GetBoolExtension(file.Options, E_TypedeclAll, true))
+}
+
 func GetCustomType(field *google_protobuf.FieldDescriptorProto) string {
+    if field == nil {
+        return ""
+    }
     if field.Options != nil {
         v, err := proto.GetExtension(field.Options, E_Customtype)
         if err == nil && v.(*string) != nil {
@@ -93,6 +112,9 @@ func GetCustomType(field *google_protobuf.FieldDescriptorProto) string {
 }
 
 func GetCastType(field *google_protobuf.FieldDescriptorProto) string {
+    if field == nil {
+        return ""
+    }
     if field.Options != nil {
         v, err := proto.GetExtension(field.Options, E_Casttype)
         if err == nil && v.(*string) != nil {
@@ -103,6 +125,9 @@ func GetCastType(field *google_protobuf.FieldDescriptorProto) string {
 }
 
 func GetCastKey(field *google_protobuf.FieldDescriptorProto) string {
+    if field == nil {
+        return ""
+    }
     if field.Options != nil {
         v, err := proto.GetExtension(field.Options, E_Castkey)
         if err == nil && v.(*string) != nil {
@@ -113,6 +138,9 @@ func GetCastKey(field *google_protobuf.FieldDescriptorProto) string {
 }
 
 func GetCastValue(field *google_protobuf.FieldDescriptorProto) string {
+    if field == nil {
+        return ""
+    }
     if field.Options != nil {
         v, err := proto.GetExtension(field.Options, E_Castvalue)
         if err == nil && v.(*string) != nil {
@@ -147,6 +175,9 @@ func IsEnumValueCustomName(field *google_protobuf.EnumValueDescriptorProto) bool
 }
 
 func GetCustomName(field *google_protobuf.FieldDescriptorProto) string {
+    if field == nil {
+        return ""
+    }
     if field.Options != nil {
         v, err := proto.GetExtension(field.Options, E_Customname)
         if err == nil && v.(*string) != nil {
@@ -157,6 +188,9 @@ func GetCustomName(field *google_protobuf.FieldDescriptorProto) string {
 }
 
 func GetEnumCustomName(field *google_protobuf.EnumDescriptorProto) string {
+    if field == nil {
+        return ""
+    }
     if field.Options != nil {
         v, err := proto.GetExtension(field.Options, E_EnumCustomname)
         if err == nil && v.(*string) != nil {
@@ -167,6 +201,9 @@ func GetEnumCustomName(field *google_protobuf.EnumDescriptorProto) string {
 }
 
 func GetEnumValueCustomName(field *google_protobuf.EnumValueDescriptorProto) string {
+    if field == nil {
+        return ""
+    }
     if field.Options != nil {
         v, err := proto.GetExtension(field.Options, E_EnumvalueCustomname)
         if err == nil && v.(*string) != nil {
@@ -177,6 +214,9 @@ func GetEnumValueCustomName(field *google_protobuf.EnumValueDescriptorProto) str
 }
 
 func GetJsonTag(field *google_protobuf.FieldDescriptorProto) *string {
+    if field == nil {
+        return nil
+    }
     if field.Options != nil {
         v, err := proto.GetExtension(field.Options, E_Jsontag)
         if err == nil && v.(*string) != nil {
@@ -187,6 +227,9 @@ func GetJsonTag(field *google_protobuf.FieldDescriptorProto) *string {
 }
 
 func GetMoreTags(field *google_protobuf.FieldDescriptorProto) *string {
+    if field == nil {
+        return nil
+    }
     if field.Options != nil {
         v, err := proto.GetExtension(field.Options, E_Moretags)
         if err == nil && v.(*string) != nil {
@@ -308,3 +351,7 @@ func ImportsGoGoProto(file *google_protobuf.FileDescriptorProto) bool {
 func HasCompare(file *google_protobuf.FileDescriptorProto, message *google_protobuf.DescriptorProto) bool {
     return proto.GetBoolExtension(message.Options, E_Compare, proto.GetBoolExtension(file.Options, E_CompareAll, false))
 }
+
+func RegistersGolangProto(file *google_protobuf.FileDescriptorProto) bool {
+    return proto.GetBoolExtension(file.Options, E_GoprotoRegistration, false)
+}
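
The nil guards added above make the gogoproto accessors safe to call with a missing field descriptor. A minimal sketch of that behaviour, not part of this diff and assuming the usual gogo/protobuf import path for the descriptor package:

package main

import (
	"fmt"

	"github.com/gogo/protobuf/gogoproto"
	descriptor "github.com/gogo/protobuf/protoc-gen-gogo/descriptor"
)

func main() {
	// A nil *FieldDescriptorProto used to panic inside these accessors
	// (field.Options dereferences a nil pointer); with the new guards
	// they simply return the zero value.
	var field *descriptor.FieldDescriptorProto
	fmt.Printf("custom name: %q\n", gogoproto.GetCustomName(field))
	fmt.Printf("cast type:   %q\n", gogoproto.GetCastType(field))
	fmt.Println("json tag is nil:", gogoproto.GetJsonTag(field) == nil)
}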

112  vendor/github.com/gogo/protobuf/proto/decode.go  generated vendored

@@ -61,7 +61,6 @@ var ErrInternalBadWireType = errors.New("proto: internal error: bad wiretype for
 // int32, int64, uint32, uint64, bool, and enum
 // protocol buffer types.
 func DecodeVarint(buf []byte) (x uint64, n int) {
-    // x, n already 0
     for shift := uint(0); shift < 64; shift += 7 {
         if n >= len(buf) {
             return 0, 0
@@ -78,13 +77,7 @@ func DecodeVarint(buf []byte) (x uint64, n int) {
         return 0, 0
     }
 
-// DecodeVarint reads a varint-encoded integer from the Buffer.
-// This is the format for the
-// int32, int64, uint32, uint64, bool, and enum
-// protocol buffer types.
-func (p *Buffer) DecodeVarint() (x uint64, err error) {
-    // x, err already 0
-
+func (p *Buffer) decodeVarintSlow() (x uint64, err error) {
     i := p.index
     l := len(p.buf)
 
@@ -107,6 +100,107 @@ func (p *Buffer) DecodeVarint() (x uint64, err error) {
     return
 }
 
+// DecodeVarint reads a varint-encoded integer from the Buffer.
+// This is the format for the
+// int32, int64, uint32, uint64, bool, and enum
+// protocol buffer types.
+func (p *Buffer) DecodeVarint() (x uint64, err error) {
+    i := p.index
+    buf := p.buf
+
+    if i >= len(buf) {
+        return 0, io.ErrUnexpectedEOF
+    } else if buf[i] < 0x80 {
+        p.index++
+        return uint64(buf[i]), nil
+    } else if len(buf)-i < 10 {
+        return p.decodeVarintSlow()
+    }
+
+    var b uint64
+    // we already checked the first byte
+    x = uint64(buf[i]) - 0x80
+    i++
+
+    b = uint64(buf[i])
+    i++
+    x += b << 7
+    if b&0x80 == 0 {
+        goto done
+    }
+    x -= 0x80 << 7
+
+    b = uint64(buf[i])
+    i++
+    x += b << 14
+    if b&0x80 == 0 {
+        goto done
+    }
+    x -= 0x80 << 14
+
+    b = uint64(buf[i])
+    i++
+    x += b << 21
+    if b&0x80 == 0 {
+        goto done
+    }
+    x -= 0x80 << 21
+
+    b = uint64(buf[i])
+    i++
+    x += b << 28
+    if b&0x80 == 0 {
+        goto done
+    }
+    x -= 0x80 << 28
+
+    b = uint64(buf[i])
+    i++
+    x += b << 35
+    if b&0x80 == 0 {
+        goto done
+    }
+    x -= 0x80 << 35
+
+    b = uint64(buf[i])
+    i++
+    x += b << 42
+    if b&0x80 == 0 {
+        goto done
+    }
+    x -= 0x80 << 42
+
+    b = uint64(buf[i])
+    i++
+    x += b << 49
+    if b&0x80 == 0 {
+        goto done
+    }
+    x -= 0x80 << 49
+
+    b = uint64(buf[i])
+    i++
+    x += b << 56
+    if b&0x80 == 0 {
+        goto done
+    }
+    x -= 0x80 << 56
+
+    b = uint64(buf[i])
+    i++
+    x += b << 63
+    if b&0x80 == 0 {
+        goto done
+    }
+    // x -= 0x80 << 63 // Always zero.
+
+    return 0, errOverflow
+
+done:
+    p.index = i
+    return x, nil
+}
+
 // DecodeFixed64 reads a 64-bit integer from the Buffer.
 // This is the format for the
 // fixed64, sfixed64, and double protocol buffer types.
@@ -340,6 +434,8 @@ func (p *Buffer) DecodeGroup(pb Message) error {
 // Buffer and places the decoded result in pb.  If the struct
 // underlying pb does not match the data in the buffer, the results can be
 // unpredictable.
+//
+// Unlike proto.Unmarshal, this does not reset pb before starting to unmarshal.
 func (p *Buffer) Unmarshal(pb Message) error {
     // If the object can unmarshal itself, let it.
     if u, ok := pb.(Unmarshaler); ok {
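
The regenerated DecodeVarint above handles the common cases inline (single-byte values, and buffers with at least ten bytes remaining) and only falls back to decodeVarintSlow near the end of the buffer. A small usage sketch of the exported Buffer API, not part of the vendored code:

package main

import (
	"fmt"

	"github.com/gogo/protobuf/proto"
)

func main() {
	// 300 encodes as the two-byte varint 0xAC 0x02; values below 0x80 take
	// the single-byte fast path, everything else the unrolled path.
	data := proto.EncodeVarint(300)
	buf := proto.NewBuffer(data)
	v, err := buf.DecodeVarint()
	if err != nil {
		panic(err)
	}
	fmt.Println(v) // 300
}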

5  vendor/github.com/gogo/protobuf/proto/decode_gogo.go  generated vendored

@@ -98,7 +98,7 @@ func setPtrCustomType(base structPointer, f field, v interface{}) {
     if v == nil {
         return
     }
-    structPointer_SetStructPointer(base, f, structPointer(reflect.ValueOf(v).Pointer()))
+    structPointer_SetStructPointer(base, f, toStructPointer(reflect.ValueOf(v)))
 }
 
 func setCustomType(base structPointer, f field, value interface{}) {
@@ -165,7 +165,8 @@ func (o *Buffer) dec_custom_slice_bytes(p *Properties, base structPointer) error
     }
     newBas := appendStructPointer(base, p.field, p.ctype)
 
-    setCustomType(newBas, 0, custom)
+    var zero field
+    setCustomType(newBas, zero, custom)
 
     return nil
 }

100  vendor/github.com/gogo/protobuf/proto/duration.go  generated vendored  Normal file

@@ -0,0 +1,100 @@
+// Go support for Protocol Buffers - Google's data interchange format
+//
+// Copyright 2016 The Go Authors.  All rights reserved.
+// https://github.com/golang/protobuf
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package proto
+
+// This file implements conversions between google.protobuf.Duration
+// and time.Duration.
+
+import (
+    "errors"
+    "fmt"
+    "time"
+)
+
+const (
+    // Range of a Duration in seconds, as specified in
+    // google/protobuf/duration.proto. This is about 10,000 years in seconds.
+    maxSeconds = int64(10000 * 365.25 * 24 * 60 * 60)
+    minSeconds = -maxSeconds
+)
+
+// validateDuration determines whether the Duration is valid according to the
+// definition in google/protobuf/duration.proto. A valid Duration
+// may still be too large to fit into a time.Duration (the range of Duration
+// is about 10,000 years, and the range of time.Duration is about 290).
+func validateDuration(d *duration) error {
+    if d == nil {
+        return errors.New("duration: nil Duration")
+    }
+    if d.Seconds < minSeconds || d.Seconds > maxSeconds {
+        return fmt.Errorf("duration: %#v: seconds out of range", d)
+    }
+    if d.Nanos <= -1e9 || d.Nanos >= 1e9 {
+        return fmt.Errorf("duration: %#v: nanos out of range", d)
+    }
+    // Seconds and Nanos must have the same sign, unless d.Nanos is zero.
+    if (d.Seconds < 0 && d.Nanos > 0) || (d.Seconds > 0 && d.Nanos < 0) {
+        return fmt.Errorf("duration: %#v: seconds and nanos have different signs", d)
+    }
+    return nil
+}
+
+// DurationFromProto converts a Duration to a time.Duration. DurationFromProto
+// returns an error if the Duration is invalid or is too large to be
+// represented in a time.Duration.
+func durationFromProto(p *duration) (time.Duration, error) {
+    if err := validateDuration(p); err != nil {
+        return 0, err
+    }
+    d := time.Duration(p.Seconds) * time.Second
+    if int64(d/time.Second) != p.Seconds {
+        return 0, fmt.Errorf("duration: %#v is out of range for time.Duration", p)
+    }
+    if p.Nanos != 0 {
+        d += time.Duration(p.Nanos)
+        if (d < 0) != (p.Nanos < 0) {
+            return 0, fmt.Errorf("duration: %#v is out of range for time.Duration", p)
+        }
+    }
+    return d, nil
+}
+
+// DurationProto converts a time.Duration to a Duration.
+func durationProto(d time.Duration) *duration {
+    nanos := d.Nanoseconds()
+    secs := nanos / 1e9
+    nanos -= secs * 1e9
+    return &duration{
+        Seconds: secs,
+        Nanos:   int32(nanos),
+    }
+}
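
durationProto and durationFromProto above are unexported, but the seconds/nanos split they perform is easy to see in isolation. A self-contained sketch of the same conversion; the durationPB type here is a stand-in for illustration, not the vendored struct:

package main

import (
	"fmt"
	"time"
)

// durationPB mirrors the seconds/nanos pair used by google.protobuf.Duration.
type durationPB struct {
	Seconds int64
	Nanos   int32
}

// toProto splits a time.Duration the same way durationProto above does:
// whole seconds first, the remainder as nanoseconds with the same sign.
func toProto(d time.Duration) durationPB {
	nanos := d.Nanoseconds()
	secs := nanos / 1e9
	return durationPB{Seconds: secs, Nanos: int32(nanos - secs*1e9)}
}

func main() {
	d := 90*time.Second + 500*time.Millisecond
	fmt.Printf("%+v\n", toProto(d)) // {Seconds:90 Nanos:500000000}
}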

203  vendor/github.com/gogo/protobuf/proto/duration_gogo.go  generated vendored  Normal file

@@ -0,0 +1,203 @@
+// Protocol Buffers for Go with Gadgets
+//
+// Copyright (c) 2016, The GoGo Authors. All rights reserved.
+// http://github.com/gogo/protobuf
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package proto
+
+import (
+    "reflect"
+    "time"
+)
+
+var durationType = reflect.TypeOf((*time.Duration)(nil)).Elem()
+
+type duration struct {
+    Seconds int64 `protobuf:"varint,1,opt,name=seconds,proto3" json:"seconds,omitempty"`
+    Nanos   int32 `protobuf:"varint,2,opt,name=nanos,proto3" json:"nanos,omitempty"`
+}
+
+func (m *duration) Reset()       { *m = duration{} }
+func (*duration) ProtoMessage()  {}
+func (*duration) String() string { return "duration<string>" }
+
+func init() {
+    RegisterType((*duration)(nil), "gogo.protobuf.proto.duration")
+}
+
+func (o *Buffer) decDuration() (time.Duration, error) {
+    b, err := o.DecodeRawBytes(true)
+    if err != nil {
+        return 0, err
+    }
+    dproto := &duration{}
+    if err := Unmarshal(b, dproto); err != nil {
+        return 0, err
+    }
+    return durationFromProto(dproto)
+}
+
+func (o *Buffer) dec_duration(p *Properties, base structPointer) error {
+    d, err := o.decDuration()
+    if err != nil {
+        return err
+    }
+    word64_Set(structPointer_Word64(base, p.field), o, uint64(d))
+    return nil
+}
+
+func (o *Buffer) dec_ref_duration(p *Properties, base structPointer) error {
+    d, err := o.decDuration()
+    if err != nil {
+        return err
+    }
+    word64Val_Set(structPointer_Word64Val(base, p.field), o, uint64(d))
+    return nil
+}
+
+func (o *Buffer) dec_slice_duration(p *Properties, base structPointer) error {
+    d, err := o.decDuration()
+    if err != nil {
+        return err
+    }
+    newBas := appendStructPointer(base, p.field, reflect.SliceOf(reflect.PtrTo(durationType)))
+    var zero field
+    setPtrCustomType(newBas, zero, &d)
+    return nil
+}
+
+func (o *Buffer) dec_slice_ref_duration(p *Properties, base structPointer) error {
+    d, err := o.decDuration()
+    if err != nil {
+        return err
+    }
+    structPointer_Word64Slice(base, p.field).Append(uint64(d))
+    return nil
+}
+
+func size_duration(p *Properties, base structPointer) (n int) {
+    structp := structPointer_GetStructPointer(base, p.field)
+    if structPointer_IsNil(structp) {
+        return 0
+    }
+    dur := structPointer_Interface(structp, durationType).(*time.Duration)
+    d := durationProto(*dur)
+    size := Size(d)
+    return size + sizeVarint(uint64(size)) + len(p.tagcode)
+}
+
+func (o *Buffer) enc_duration(p *Properties, base structPointer) error {
+    structp := structPointer_GetStructPointer(base, p.field)
+    if structPointer_IsNil(structp) {
+        return ErrNil
+    }
+    dur := structPointer_Interface(structp, durationType).(*time.Duration)
+    d := durationProto(*dur)
+    data, err := Marshal(d)
+    if err != nil {
+        return err
+    }
+    o.buf = append(o.buf, p.tagcode...)
+    o.EncodeRawBytes(data)
+    return nil
+}
+
+func size_ref_duration(p *Properties, base structPointer) (n int) {
+    dur := structPointer_InterfaceAt(base, p.field, durationType).(*time.Duration)
+    d := durationProto(*dur)
+    size := Size(d)
+    return size + sizeVarint(uint64(size)) + len(p.tagcode)
+}
+
+func (o *Buffer) enc_ref_duration(p *Properties, base structPointer) error {
+    dur := structPointer_InterfaceAt(base, p.field, durationType).(*time.Duration)
+    d := durationProto(*dur)
+    data, err := Marshal(d)
+    if err != nil {
+        return err
+    }
+    o.buf = append(o.buf, p.tagcode...)
+    o.EncodeRawBytes(data)
+    return nil
+}
+
+func size_slice_duration(p *Properties, base structPointer) (n int) {
+    pdurs := structPointer_InterfaceAt(base, p.field, reflect.SliceOf(reflect.PtrTo(durationType))).(*[]*time.Duration)
+    durs := *pdurs
+    for i := 0; i < len(durs); i++ {
+        if durs[i] == nil {
+            return 0
+        }
+        dproto := durationProto(*durs[i])
+        size := Size(dproto)
+        n += len(p.tagcode) + size + sizeVarint(uint64(size))
+    }
+    return n
+}
+
+func (o *Buffer) enc_slice_duration(p *Properties, base structPointer) error {
+    pdurs := structPointer_InterfaceAt(base, p.field, reflect.SliceOf(reflect.PtrTo(durationType))).(*[]*time.Duration)
+    durs := *pdurs
+    for i := 0; i < len(durs); i++ {
+        if durs[i] == nil {
+            return errRepeatedHasNil
+        }
+        dproto := durationProto(*durs[i])
+        data, err := Marshal(dproto)
+        if err != nil {
+            return err
+        }
+        o.buf = append(o.buf, p.tagcode...)
+        o.EncodeRawBytes(data)
+    }
+    return nil
+}
+
+func size_slice_ref_duration(p *Properties, base structPointer) (n int) {
+    pdurs := structPointer_InterfaceAt(base, p.field, reflect.SliceOf(durationType)).(*[]time.Duration)
+    durs := *pdurs
+    for i := 0; i < len(durs); i++ {
+        dproto := durationProto(durs[i])
+        size := Size(dproto)
+        n += len(p.tagcode) + size + sizeVarint(uint64(size))
+    }
+    return n
+}
+
+func (o *Buffer) enc_slice_ref_duration(p *Properties, base structPointer) error {
+    pdurs := structPointer_InterfaceAt(base, p.field, reflect.SliceOf(durationType)).(*[]time.Duration)
+    durs := *pdurs
+    for i := 0; i < len(durs); i++ {
+        dproto := durationProto(durs[i])
+        data, err := Marshal(dproto)
+        if err != nil {
+            return err
+        }
+        o.buf = append(o.buf, p.tagcode...)
+        o.EncodeRawBytes(data)
+    }
+    return nil
+}
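
On the wire, a stdduration field is just an embedded google.protobuf.Duration message: the encoders above marshal the seconds/nanos pair and emit it length-delimited after the parent field's tag. A hedged sketch that hand-assembles those bytes with the exported Buffer primitives; the parent field number here is hypothetical:

package main

import (
	"fmt"

	"github.com/gogo/protobuf/proto"
)

func main() {
	// Encode the submessage {seconds: 90, nanos: 500000000} by hand.
	sub := proto.NewBuffer(nil)
	sub.EncodeVarint(1<<3 | 0) // field 1 (seconds), wire type 0 (varint)
	sub.EncodeVarint(90)
	sub.EncodeVarint(2<<3 | 0) // field 2 (nanos), wire type 0 (varint)
	sub.EncodeVarint(500000000)

	// Embed it in a parent message, as enc_duration does: tag, then
	// the length-delimited submessage bytes.
	parent := proto.NewBuffer(nil)
	parent.EncodeVarint(1<<3 | 2) // hypothetical parent field 1, wire type 2 (bytes)
	parent.EncodeRawBytes(sub.Bytes())

	fmt.Printf("% x\n", parent.Bytes())
}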

25  vendor/github.com/gogo/protobuf/proto/encode.go  generated vendored

@@ -234,10 +234,6 @@ func Marshal(pb Message) ([]byte, error) {
     }
     p := NewBuffer(nil)
     err := p.Marshal(pb)
-    var state errorState
-    if err != nil && !state.shouldContinue(err, nil) {
-        return nil, err
-    }
     if p.buf == nil && err == nil {
         // Return a non-nil slice on success.
         return []byte{}, nil
@@ -266,11 +262,8 @@ func (p *Buffer) Marshal(pb Message) error {
     // Can the object marshal itself?
     if m, ok := pb.(Marshaler); ok {
         data, err := m.Marshal()
-        if err != nil {
-            return err
-        }
         p.buf = append(p.buf, data...)
-        return nil
+        return err
     }
 
     t, base, err := getbase(pb)
@@ -282,7 +275,7 @@ func (p *Buffer) Marshal(pb Message) error {
     }
 
     if collectStats {
-        stats.Encode++
+        (stats).Encode++ // Parens are to work around a goimports bug.
     }
 
     if len(p.buf) > maxMarshalSize {
@@ -309,7 +302,7 @@ func Size(pb Message) (n int) {
     }
 
     if collectStats {
-        stats.Size++
+        (stats).Size++ // Parens are to work around a goimports bug.
     }
 
     return
@@ -1014,7 +1007,6 @@ func size_slice_struct_message(p *Properties, base structPointer) (n int) {
         if p.isMarshaler {
             m := structPointer_Interface(structp, p.stype).(Marshaler)
             data, _ := m.Marshal()
-            n += len(p.tagcode)
             n += sizeRawBytes(data)
             continue
         }
@@ -1083,10 +1075,17 @@ func (o *Buffer) enc_map(p *Properties, base structPointer) error {
 
 func (o *Buffer) enc_exts(p *Properties, base structPointer) error {
     exts := structPointer_Extensions(base, p.field)
-    if err := encodeExtensions(exts); err != nil {
+
+    v, mu := exts.extensionsRead()
+    if v == nil {
+        return nil
+    }
+
+    mu.Lock()
+    defer mu.Unlock()
+    if err := encodeExtensionsMap(v); err != nil {
         return err
     }
-    v, _ := exts.extensionsRead()
 
     return o.enc_map_body(v)
 }
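
With the change above, Buffer.Marshal appends whatever bytes a self-marshaling message returned and reports its error, instead of discarding the partial output. A sketch of a type that takes that Marshaler fast path; rawMsg is hypothetical and not an API of the package:

package main

import (
	"fmt"

	"github.com/gogo/protobuf/proto"
)

// rawMsg satisfies both proto.Message and proto.Marshaler, so Buffer.Marshal
// uses the fast path and simply appends whatever Marshal returns.
type rawMsg struct{ payload []byte }

func (m *rawMsg) Reset()                   { m.payload = nil }
func (m *rawMsg) String() string           { return fmt.Sprintf("rawMsg(%d bytes)", len(m.payload)) }
func (*rawMsg) ProtoMessage()              {}
func (m *rawMsg) Marshal() ([]byte, error) { return m.payload, nil }

func main() {
	buf := proto.NewBuffer(nil)
	if err := buf.Marshal(&rawMsg{payload: []byte{0x08, 0x2a}}); err != nil {
		panic(err)
	}
	fmt.Printf("% x\n", buf.Bytes()) // 08 2a
}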

16  vendor/github.com/gogo/protobuf/proto/encode_gogo.go  generated vendored

@@ -196,12 +196,10 @@ func size_ref_struct_message(p *Properties, base structPointer) int {
 // Encode a slice of references to message struct pointers ([]struct).
 func (o *Buffer) enc_slice_ref_struct_message(p *Properties, base structPointer) error {
     var state errorState
-    ss := structPointer_GetStructPointer(base, p.field)
-    ss1 := structPointer_GetRefStructPointer(ss, field(0))
-    size := p.stype.Size()
-    l := structPointer_Len(base, p.field)
+    ss := structPointer_StructRefSlice(base, p.field, p.stype.Size())
+    l := ss.Len()
     for i := 0; i < l; i++ {
-        structp := structPointer_Add(ss1, field(uintptr(i)*size))
+        structp := ss.Index(i)
         if structPointer_IsNil(structp) {
             return errRepeatedHasNil
         }
@@ -233,13 +231,11 @@ func (o *Buffer) enc_slice_ref_struct_message(p *Properties, base structPointer)
 
 //TODO this is only copied, please fix this
 func size_slice_ref_struct_message(p *Properties, base structPointer) (n int) {
-    ss := structPointer_GetStructPointer(base, p.field)
-    ss1 := structPointer_GetRefStructPointer(ss, field(0))
-    size := p.stype.Size()
-    l := structPointer_Len(base, p.field)
+    ss := structPointer_StructRefSlice(base, p.field, p.stype.Size())
+    l := ss.Len()
     n += l * len(p.tagcode)
     for i := 0; i < l; i++ {
-        structp := structPointer_Add(ss1, field(uintptr(i)*size))
+        structp := ss.Index(i)
         if structPointer_IsNil(structp) {
             return // return the size up to this point
         }

8  vendor/github.com/gogo/protobuf/proto/equal.go  generated vendored

@@ -54,13 +54,17 @@ Equality is defined in this way:
     in a proto3 .proto file, fields are not "set"; specifically,
     zero length proto3 "bytes" fields are equal (nil == {}).
   - Two repeated fields are equal iff their lengths are the same,
-    and their corresponding elements are equal (a "bytes" field,
-    although represented by []byte, is not a repeated field)
+    and their corresponding elements are equal. Note a "bytes" field,
+    although represented by []byte, is not a repeated field and the
+    rule for the scalar fields described above applies.
   - Two unset fields are equal.
   - Two unknown field sets are equal if their current
     encoded state is equal.
   - Two extension sets are equal iff they have corresponding
     elements that are pairwise equal.
+  - Two map fields are equal iff their lengths are the same,
+    and they contain the same set of elements. Zero-length map
+    fields are equal.
   - Every other combination of things are not equal.
 
 The return value is undefined if a and b are not protocol buffers.

4  vendor/github.com/gogo/protobuf/proto/extensions.go  generated vendored

@@ -167,6 +167,7 @@ type ExtensionDesc struct {
     Field         int32  // field number
     Name          string // fully-qualified name of extension, for text formatting
     Tag           string // protobuf tag style
+    Filename      string // name of the file in which the extension is defined
 }
 
 func (ed *ExtensionDesc) repeated() bool {
@@ -587,6 +588,9 @@ func ExtensionDescs(pb Message) ([]*ExtensionDesc, error) {
     registeredExtensions := RegisteredExtensions(pb)
 
     emap, mu := epb.extensionsRead()
+    if emap == nil {
+        return nil, nil
+    }
     mu.Lock()
     defer mu.Unlock()
     extensions := make([]*ExtensionDesc, 0, len(emap))

2  vendor/github.com/gogo/protobuf/proto/lib.go  generated vendored

@@ -308,7 +308,7 @@ func GetStats() Stats { return stats }
 // temporary Buffer and are fine for most applications.
 type Buffer struct {
     buf   []byte // encode/decode byte stream
-    index int    // write point
+    index int    // read point
 
     // pools of basic types to amortize allocation.
     bools   []bool

85  vendor/github.com/gogo/protobuf/proto/pointer_reflect_gogo.go  generated vendored  Normal file

@@ -0,0 +1,85 @@
+// Protocol Buffers for Go with Gadgets
+//
+// Copyright (c) 2016, The GoGo Authors. All rights reserved.
+// http://github.com/gogo/protobuf
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// +build appengine js
+
+package proto
+
+import (
+    "reflect"
+)
+
+func structPointer_FieldPointer(p structPointer, f field) structPointer {
+    panic("not implemented")
+}
+
+func appendStructPointer(base structPointer, f field, typ reflect.Type) structPointer {
+    panic("not implemented")
+}
+
+func structPointer_InterfaceAt(p structPointer, f field, t reflect.Type) interface{} {
+    panic("not implemented")
+}
+
+func structPointer_InterfaceRef(p structPointer, f field, t reflect.Type) interface{} {
+    panic("not implemented")
+}
+
+func structPointer_GetRefStructPointer(p structPointer, f field) structPointer {
+    panic("not implemented")
+}
+
+func structPointer_Add(p structPointer, size field) structPointer {
+    panic("not implemented")
+}
+
+func structPointer_Len(p structPointer, f field) int {
+    panic("not implemented")
+}
+
+func structPointer_GetSliceHeader(p structPointer, f field) *reflect.SliceHeader {
+    panic("not implemented")
+}
+
+func structPointer_Copy(oldptr structPointer, newptr structPointer, size int) {
+    panic("not implemented")
+}
+
+func structPointer_StructRefSlice(p structPointer, f field, size uintptr) *structRefSlice {
+    panic("not implemented")
+}
+
+type structRefSlice struct{}
+
+func (v *structRefSlice) Len() int {
+    panic("not implemented")
+}
+
+func (v *structRefSlice) Index(i int) structPointer {
+    panic("not implemented")
+}

23  vendor/github.com/gogo/protobuf/proto/pointer_unsafe_gogo.go  generated vendored

@@ -26,7 +26,7 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-// +build !appengine
+// +build !appengine,!js
 
 // This file contains the implementation of the proto field accesses using package unsafe.
 
@@ -105,3 +105,24 @@ func structPointer_Add(p structPointer, size field) structPointer {
 func structPointer_Len(p structPointer, f field) int {
     return len(*(*[]interface{})(unsafe.Pointer(structPointer_GetRefStructPointer(p, f))))
 }
+
+func structPointer_StructRefSlice(p structPointer, f field, size uintptr) *structRefSlice {
+    return &structRefSlice{p: p, f: f, size: size}
+}
+
+// A structRefSlice represents a slice of structs (themselves submessages or groups).
+type structRefSlice struct {
+    p    structPointer
+    f    field
+    size uintptr
+}
+
+func (v *structRefSlice) Len() int {
+    return structPointer_Len(v.p, v.f)
+}
+
+func (v *structRefSlice) Index(i int) structPointer {
+    ss := structPointer_GetStructPointer(v.p, v.f)
+    ss1 := structPointer_GetRefStructPointer(ss, 0)
+    return structPointer_Add(ss1, field(uintptr(i)*v.size))
+}
34
vendor/github.com/gogo/protobuf/proto/properties.go
generated
vendored
34
vendor/github.com/gogo/protobuf/proto/properties.go
generated
vendored
|
@ -193,7 +193,8 @@ type Properties struct {
|
||||||
Default string // default value
|
Default string // default value
|
||||||
HasDefault bool // whether an explicit default was provided
|
HasDefault bool // whether an explicit default was provided
|
||||||
CustomType string
|
CustomType string
|
||||||
def_uint64 uint64
|
StdTime bool
|
||||||
|
StdDuration bool
|
||||||
|
|
||||||
enc encoder
|
enc encoder
|
||||||
valEnc valueEncoder // set for bool and numeric types only
|
valEnc valueEncoder // set for bool and numeric types only
|
||||||
|
@ -340,6 +341,10 @@ func (p *Properties) Parse(s string) {
|
||||||
p.OrigName = strings.Split(f, "=")[1]
|
p.OrigName = strings.Split(f, "=")[1]
|
||||||
case strings.HasPrefix(f, "customtype="):
|
case strings.HasPrefix(f, "customtype="):
|
||||||
p.CustomType = strings.Split(f, "=")[1]
|
p.CustomType = strings.Split(f, "=")[1]
|
||||||
|
case f == "stdtime":
|
||||||
|
p.StdTime = true
|
||||||
|
case f == "stdduration":
|
||||||
|
p.StdDuration = true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -355,11 +360,22 @@ func (p *Properties) setEncAndDec(typ reflect.Type, f *reflect.StructField, lock
|
||||||
p.enc = nil
|
p.enc = nil
|
||||||
p.dec = nil
|
p.dec = nil
|
||||||
p.size = nil
|
p.size = nil
|
||||||
if len(p.CustomType) > 0 {
|
isMap := typ.Kind() == reflect.Map
|
||||||
|
if len(p.CustomType) > 0 && !isMap {
|
||||||
p.setCustomEncAndDec(typ)
|
p.setCustomEncAndDec(typ)
|
||||||
p.setTag(lockGetProp)
|
p.setTag(lockGetProp)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
if p.StdTime && !isMap {
|
||||||
|
p.setTimeEncAndDec(typ)
|
||||||
|
p.setTag(lockGetProp)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if p.StdDuration && !isMap {
|
||||||
|
p.setDurationEncAndDec(typ)
|
||||||
|
p.setTag(lockGetProp)
|
||||||
|
return
|
||||||
|
}
|
||||||
switch t1 := typ; t1.Kind() {
|
switch t1 := typ; t1.Kind() {
|
||||||
default:
|
default:
|
||||||
fmt.Fprintf(os.Stderr, "proto: no coders for %v\n", t1)
|
fmt.Fprintf(os.Stderr, "proto: no coders for %v\n", t1)
|
||||||
|
@ -630,6 +646,10 @@ func (p *Properties) setEncAndDec(typ reflect.Type, f *reflect.StructField, lock
|
||||||
// so we need encoders for the pointer to this type.
|
// so we need encoders for the pointer to this type.
|
||||||
vtype = reflect.PtrTo(vtype)
|
vtype = reflect.PtrTo(vtype)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
p.mvalprop.CustomType = p.CustomType
|
||||||
|
p.mvalprop.StdDuration = p.StdDuration
|
||||||
|
p.mvalprop.StdTime = p.StdTime
|
||||||
p.mvalprop.init(vtype, "Value", f.Tag.Get("protobuf_val"), nil, lockGetProp)
|
p.mvalprop.init(vtype, "Value", f.Tag.Get("protobuf_val"), nil, lockGetProp)
|
||||||
}
|
}
|
||||||
p.setTag(lockGetProp)
|
p.setTag(lockGetProp)
|
||||||
|
@ -920,7 +940,15 @@ func RegisterType(x Message, name string) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// MessageName returns the fully-qualified proto name for the given message type.
|
// MessageName returns the fully-qualified proto name for the given message type.
|
||||||
func MessageName(x Message) string { return revProtoTypes[reflect.TypeOf(x)] }
|
func MessageName(x Message) string {
|
||||||
|
type xname interface {
|
||||||
|
XXX_MessageName() string
|
||||||
|
}
|
||||||
|
if m, ok := x.(xname); ok {
|
||||||
|
return m.XXX_MessageName()
|
||||||
|
}
|
||||||
|
return revProtoTypes[reflect.TypeOf(x)]
|
||||||
|
}
|
||||||
|
|
||||||
// MessageType returns the message type (pointer to struct) for a named message.
|
// MessageType returns the message type (pointer to struct) for a named message.
|
||||||
func MessageType(name string) reflect.Type { return protoTypes[name] }
|
func MessageType(name string) reflect.Type { return protoTypes[name] }
|
||||||
|
|
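The StdTime/StdDuration properties added to properties.go above are driven by extra options in the protobuf struct tag, parsed in the new "stdtime"/"stdduration" cases. A rough sketch of a struct whose tags Parse would now pick up; the message, field names, and exact tag layout are illustrative assumptions, not taken from this change:

    package example

    import "time"

    // Hypothetical gogo-generated message using the stdtime/stdduration options.
    type Event struct {
        // "stdtime" marks a field generated as a native time.Time.
        CreatedAt time.Time `protobuf:"bytes,1,opt,name=created_at,proto3,stdtime" json:"created_at"`
        // "stdduration" does the same for time.Duration.
        Timeout time.Duration `protobuf:"bytes,2,opt,name=timeout,proto3,stdduration" json:"timeout"`
    }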
45
vendor/github.com/gogo/protobuf/proto/properties_gogo.go
generated
vendored
|
@ -51,6 +51,51 @@ func (p *Properties) setCustomEncAndDec(typ reflect.Type) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (p *Properties) setDurationEncAndDec(typ reflect.Type) {
|
||||||
|
if p.Repeated {
|
||||||
|
if typ.Elem().Kind() == reflect.Ptr {
|
||||||
|
p.enc = (*Buffer).enc_slice_duration
|
||||||
|
p.dec = (*Buffer).dec_slice_duration
|
||||||
|
p.size = size_slice_duration
|
||||||
|
} else {
|
||||||
|
p.enc = (*Buffer).enc_slice_ref_duration
|
||||||
|
p.dec = (*Buffer).dec_slice_ref_duration
|
||||||
|
p.size = size_slice_ref_duration
|
||||||
|
}
|
||||||
|
} else if typ.Kind() == reflect.Ptr {
|
||||||
|
p.enc = (*Buffer).enc_duration
|
||||||
|
p.dec = (*Buffer).dec_duration
|
||||||
|
p.size = size_duration
|
||||||
|
} else {
|
||||||
|
p.enc = (*Buffer).enc_ref_duration
|
||||||
|
p.dec = (*Buffer).dec_ref_duration
|
||||||
|
p.size = size_ref_duration
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *Properties) setTimeEncAndDec(typ reflect.Type) {
|
||||||
|
if p.Repeated {
|
||||||
|
if typ.Elem().Kind() == reflect.Ptr {
|
||||||
|
p.enc = (*Buffer).enc_slice_time
|
||||||
|
p.dec = (*Buffer).dec_slice_time
|
||||||
|
p.size = size_slice_time
|
||||||
|
} else {
|
||||||
|
p.enc = (*Buffer).enc_slice_ref_time
|
||||||
|
p.dec = (*Buffer).dec_slice_ref_time
|
||||||
|
p.size = size_slice_ref_time
|
||||||
|
}
|
||||||
|
} else if typ.Kind() == reflect.Ptr {
|
||||||
|
p.enc = (*Buffer).enc_time
|
||||||
|
p.dec = (*Buffer).dec_time
|
||||||
|
p.size = size_time
|
||||||
|
} else {
|
||||||
|
p.enc = (*Buffer).enc_ref_time
|
||||||
|
p.dec = (*Buffer).dec_ref_time
|
||||||
|
p.size = size_ref_time
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
func (p *Properties) setSliceOfNonPointerStructs(typ reflect.Type) {
|
func (p *Properties) setSliceOfNonPointerStructs(typ reflect.Type) {
|
||||||
t2 := typ.Elem()
|
t2 := typ.Elem()
|
||||||
p.sstype = typ
|
p.sstype = typ
|
||||||
|
|
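setTimeEncAndDec and setDurationEncAndDec above only choose between four encoder/decoder/size triples, depending on whether the field is repeated and whether its elements are pointers. The four Go field shapes they distinguish, using the function names from the diff:

    package example

    import "time"

    type Shapes struct {
        A *time.Time   // enc_time / dec_time / size_time
        B time.Time    // enc_ref_time / dec_ref_time / size_ref_time
        C []*time.Time // enc_slice_time / dec_slice_time / size_slice_time
        D []time.Time  // enc_slice_ref_time / dec_slice_ref_time / size_slice_ref_time
    }

setDurationEncAndDec follows the same split with time.Duration and the *_duration variants.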
163
vendor/github.com/gogo/protobuf/proto/text.go
generated
vendored
|
@ -51,6 +51,7 @@ import (
|
||||||
"sort"
|
"sort"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
"sync"
|
||||||
|
"time"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
|
@ -181,7 +182,93 @@ type raw interface {
|
||||||
Bytes() []byte
|
Bytes() []byte
|
||||||
}
|
}
|
||||||
|
|
||||||
func writeStruct(w *textWriter, sv reflect.Value) error {
|
func requiresQuotes(u string) bool {
|
||||||
|
// When type URL contains any characters except [0-9A-Za-z./\-]*, it must be quoted.
|
||||||
|
for _, ch := range u {
|
||||||
|
switch {
|
||||||
|
case ch == '.' || ch == '/' || ch == '_':
|
||||||
|
continue
|
||||||
|
case '0' <= ch && ch <= '9':
|
||||||
|
continue
|
||||||
|
case 'A' <= ch && ch <= 'Z':
|
||||||
|
continue
|
||||||
|
case 'a' <= ch && ch <= 'z':
|
||||||
|
continue
|
||||||
|
default:
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// isAny reports whether sv is a google.protobuf.Any message
|
||||||
|
func isAny(sv reflect.Value) bool {
|
||||||
|
type wkt interface {
|
||||||
|
XXX_WellKnownType() string
|
||||||
|
}
|
||||||
|
t, ok := sv.Addr().Interface().(wkt)
|
||||||
|
return ok && t.XXX_WellKnownType() == "Any"
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeProto3Any writes an expanded google.protobuf.Any message.
|
||||||
|
//
|
||||||
|
// It returns (false, nil) if sv value can't be unmarshaled (e.g. because
|
||||||
|
// required messages are not linked in).
|
||||||
|
//
|
||||||
|
// It returns (true, error) when sv was written in expanded format or an error
|
||||||
|
// was encountered.
|
||||||
|
func (tm *TextMarshaler) writeProto3Any(w *textWriter, sv reflect.Value) (bool, error) {
|
||||||
|
turl := sv.FieldByName("TypeUrl")
|
||||||
|
val := sv.FieldByName("Value")
|
||||||
|
if !turl.IsValid() || !val.IsValid() {
|
||||||
|
return true, errors.New("proto: invalid google.protobuf.Any message")
|
||||||
|
}
|
||||||
|
|
||||||
|
b, ok := val.Interface().([]byte)
|
||||||
|
if !ok {
|
||||||
|
return true, errors.New("proto: invalid google.protobuf.Any message")
|
||||||
|
}
|
||||||
|
|
||||||
|
parts := strings.Split(turl.String(), "/")
|
||||||
|
mt := MessageType(parts[len(parts)-1])
|
||||||
|
if mt == nil {
|
||||||
|
return false, nil
|
||||||
|
}
|
||||||
|
m := reflect.New(mt.Elem())
|
||||||
|
if err := Unmarshal(b, m.Interface().(Message)); err != nil {
|
||||||
|
return false, nil
|
||||||
|
}
|
||||||
|
w.Write([]byte("["))
|
||||||
|
u := turl.String()
|
||||||
|
if requiresQuotes(u) {
|
||||||
|
writeString(w, u)
|
||||||
|
} else {
|
||||||
|
w.Write([]byte(u))
|
||||||
|
}
|
||||||
|
if w.compact {
|
||||||
|
w.Write([]byte("]:<"))
|
||||||
|
} else {
|
||||||
|
w.Write([]byte("]: <\n"))
|
||||||
|
w.ind++
|
||||||
|
}
|
||||||
|
if err := tm.writeStruct(w, m.Elem()); err != nil {
|
||||||
|
return true, err
|
||||||
|
}
|
||||||
|
if w.compact {
|
||||||
|
w.Write([]byte("> "))
|
||||||
|
} else {
|
||||||
|
w.ind--
|
||||||
|
w.Write([]byte(">\n"))
|
||||||
|
}
|
||||||
|
return true, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (tm *TextMarshaler) writeStruct(w *textWriter, sv reflect.Value) error {
|
||||||
|
if tm.ExpandAny && isAny(sv) {
|
||||||
|
if canExpand, err := tm.writeProto3Any(w, sv); canExpand {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
st := sv.Type()
|
st := sv.Type()
|
||||||
sprops := GetProperties(st)
|
sprops := GetProperties(st)
|
||||||
for i := 0; i < sv.NumField(); i++ {
|
for i := 0; i < sv.NumField(); i++ {
|
||||||
|
@ -234,10 +321,10 @@ func writeStruct(w *textWriter, sv reflect.Value) error {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if len(props.Enum) > 0 {
|
if len(props.Enum) > 0 {
|
||||||
if err := writeEnum(w, v, props); err != nil {
|
if err := tm.writeEnum(w, v, props); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
} else if err := writeAny(w, v, props); err != nil {
|
} else if err := tm.writeAny(w, v, props); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := w.WriteByte('\n'); err != nil {
|
if err := w.WriteByte('\n'); err != nil {
|
||||||
|
@ -279,7 +366,7 @@ func writeStruct(w *textWriter, sv reflect.Value) error {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if err := writeAny(w, key, props.mkeyprop); err != nil {
|
if err := tm.writeAny(w, key, props.mkeyprop); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := w.WriteByte('\n'); err != nil {
|
if err := w.WriteByte('\n'); err != nil {
|
||||||
|
@ -296,7 +383,7 @@ func writeStruct(w *textWriter, sv reflect.Value) error {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if err := writeAny(w, val, props.mvalprop); err != nil {
|
if err := tm.writeAny(w, val, props.mvalprop); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := w.WriteByte('\n'); err != nil {
|
if err := w.WriteByte('\n'); err != nil {
|
||||||
|
@ -368,10 +455,10 @@ func writeStruct(w *textWriter, sv reflect.Value) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(props.Enum) > 0 {
|
if len(props.Enum) > 0 {
|
||||||
if err := writeEnum(w, fv, props); err != nil {
|
if err := tm.writeEnum(w, fv, props); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
} else if err := writeAny(w, fv, props); err != nil {
|
} else if err := tm.writeAny(w, fv, props); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -389,7 +476,7 @@ func writeStruct(w *textWriter, sv reflect.Value) error {
|
||||||
pv.Elem().Set(sv)
|
pv.Elem().Set(sv)
|
||||||
}
|
}
|
||||||
if pv.Type().Implements(extensionRangeType) {
|
if pv.Type().Implements(extensionRangeType) {
|
||||||
if err := writeExtensions(w, pv); err != nil {
|
if err := tm.writeExtensions(w, pv); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -419,10 +506,11 @@ func writeRaw(w *textWriter, b []byte) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
// writeAny writes an arbitrary field.
|
// writeAny writes an arbitrary field.
|
||||||
func writeAny(w *textWriter, v reflect.Value, props *Properties) error {
|
func (tm *TextMarshaler) writeAny(w *textWriter, v reflect.Value, props *Properties) error {
|
||||||
v = reflect.Indirect(v)
|
v = reflect.Indirect(v)
|
||||||
|
|
||||||
if props != nil && len(props.CustomType) > 0 {
|
if props != nil {
|
||||||
|
if len(props.CustomType) > 0 {
|
||||||
custom, ok := v.Interface().(Marshaler)
|
custom, ok := v.Interface().(Marshaler)
|
||||||
if ok {
|
if ok {
|
||||||
data, err := custom.Marshal()
|
data, err := custom.Marshal()
|
||||||
|
@ -434,6 +522,30 @@ func writeAny(w *textWriter, v reflect.Value, props *Properties) error {
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
} else if props.StdTime {
|
||||||
|
t, ok := v.Interface().(time.Time)
|
||||||
|
if !ok {
|
||||||
|
return fmt.Errorf("stdtime is not time.Time, but %T", v.Interface())
|
||||||
|
}
|
||||||
|
tproto, err := timestampProto(t)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
props.StdTime = false
|
||||||
|
err = tm.writeAny(w, reflect.ValueOf(tproto), props)
|
||||||
|
props.StdTime = true
|
||||||
|
return err
|
||||||
|
} else if props.StdDuration {
|
||||||
|
d, ok := v.Interface().(time.Duration)
|
||||||
|
if !ok {
|
||||||
|
return fmt.Errorf("stdtime is not time.Duration, but %T", v.Interface())
|
||||||
|
}
|
||||||
|
dproto := durationProto(d)
|
||||||
|
props.StdDuration = false
|
||||||
|
err := tm.writeAny(w, reflect.ValueOf(dproto), props)
|
||||||
|
props.StdDuration = true
|
||||||
|
return err
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Floats have special cases.
|
// Floats have special cases.
|
||||||
|
@ -482,15 +594,15 @@ func writeAny(w *textWriter, v reflect.Value, props *Properties) error {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
w.indent()
|
w.indent()
|
||||||
if tm, ok := v.Interface().(encoding.TextMarshaler); ok {
|
if etm, ok := v.Interface().(encoding.TextMarshaler); ok {
|
||||||
text, err := tm.MarshalText()
|
text, err := etm.MarshalText()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if _, err = w.Write(text); err != nil {
|
if _, err = w.Write(text); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
} else if err := writeStruct(w, v); err != nil {
|
} else if err := tm.writeStruct(w, v); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
w.unindent()
|
w.unindent()
|
||||||
|
@ -634,7 +746,7 @@ func (s int32Slice) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
|
||||||
|
|
||||||
// writeExtensions writes all the extensions in pv.
|
// writeExtensions writes all the extensions in pv.
|
||||||
// pv is assumed to be a pointer to a protocol message struct that is extendable.
|
// pv is assumed to be a pointer to a protocol message struct that is extendable.
|
||||||
func writeExtensions(w *textWriter, pv reflect.Value) error {
|
func (tm *TextMarshaler) writeExtensions(w *textWriter, pv reflect.Value) error {
|
||||||
emap := extensionMaps[pv.Type().Elem()]
|
emap := extensionMaps[pv.Type().Elem()]
|
||||||
e := pv.Interface().(Message)
|
e := pv.Interface().(Message)
|
||||||
|
|
||||||
|
@ -689,13 +801,13 @@ func writeExtensions(w *textWriter, pv reflect.Value) error {
|
||||||
|
|
||||||
// Repeated extensions will appear as a slice.
|
// Repeated extensions will appear as a slice.
|
||||||
if !desc.repeated() {
|
if !desc.repeated() {
|
||||||
if err := writeExtension(w, desc.Name, pb); err != nil {
|
if err := tm.writeExtension(w, desc.Name, pb); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
v := reflect.ValueOf(pb)
|
v := reflect.ValueOf(pb)
|
||||||
for i := 0; i < v.Len(); i++ {
|
for i := 0; i < v.Len(); i++ {
|
||||||
if err := writeExtension(w, desc.Name, v.Index(i).Interface()); err != nil {
|
if err := tm.writeExtension(w, desc.Name, v.Index(i).Interface()); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -704,7 +816,7 @@ func writeExtensions(w *textWriter, pv reflect.Value) error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func writeExtension(w *textWriter, name string, pb interface{}) error {
|
func (tm *TextMarshaler) writeExtension(w *textWriter, name string, pb interface{}) error {
|
||||||
if _, err := fmt.Fprintf(w, "[%s]:", name); err != nil {
|
if _, err := fmt.Fprintf(w, "[%s]:", name); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
@ -713,7 +825,7 @@ func writeExtension(w *textWriter, name string, pb interface{}) error {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if err := writeAny(w, reflect.ValueOf(pb), nil); err != nil {
|
if err := tm.writeAny(w, reflect.ValueOf(pb), nil); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := w.WriteByte('\n'); err != nil {
|
if err := w.WriteByte('\n'); err != nil {
|
||||||
|
@ -741,11 +853,12 @@ func (w *textWriter) writeIndent() {
|
||||||
// TextMarshaler is a configurable text format marshaler.
|
// TextMarshaler is a configurable text format marshaler.
|
||||||
type TextMarshaler struct {
|
type TextMarshaler struct {
|
||||||
Compact bool // use compact text format (one line).
|
Compact bool // use compact text format (one line).
|
||||||
|
ExpandAny bool // expand google.protobuf.Any messages of known types
|
||||||
}
|
}
|
||||||
|
|
||||||
// Marshal writes a given protocol buffer in text format.
|
// Marshal writes a given protocol buffer in text format.
|
||||||
// The only errors returned are from w.
|
// The only errors returned are from w.
|
||||||
func (m *TextMarshaler) Marshal(w io.Writer, pb Message) error {
|
func (tm *TextMarshaler) Marshal(w io.Writer, pb Message) error {
|
||||||
val := reflect.ValueOf(pb)
|
val := reflect.ValueOf(pb)
|
||||||
if pb == nil || val.IsNil() {
|
if pb == nil || val.IsNil() {
|
||||||
w.Write([]byte("<nil>"))
|
w.Write([]byte("<nil>"))
|
||||||
|
@ -760,11 +873,11 @@ func (m *TextMarshaler) Marshal(w io.Writer, pb Message) error {
|
||||||
aw := &textWriter{
|
aw := &textWriter{
|
||||||
w: ww,
|
w: ww,
|
||||||
complete: true,
|
complete: true,
|
||||||
compact: m.Compact,
|
compact: tm.Compact,
|
||||||
}
|
}
|
||||||
|
|
||||||
if tm, ok := pb.(encoding.TextMarshaler); ok {
|
if etm, ok := pb.(encoding.TextMarshaler); ok {
|
||||||
text, err := tm.MarshalText()
|
text, err := etm.MarshalText()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
@ -778,7 +891,7 @@ func (m *TextMarshaler) Marshal(w io.Writer, pb Message) error {
|
||||||
}
|
}
|
||||||
// Dereference the received pointer so we don't have outer < and >.
|
// Dereference the received pointer so we don't have outer < and >.
|
||||||
v := reflect.Indirect(val)
|
v := reflect.Indirect(val)
|
||||||
if err := writeStruct(aw, v); err != nil {
|
if err := tm.writeStruct(aw, v); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if bw != nil {
|
if bw != nil {
|
||||||
|
@ -788,9 +901,9 @@ func (m *TextMarshaler) Marshal(w io.Writer, pb Message) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Text is the same as Marshal, but returns the string directly.
|
// Text is the same as Marshal, but returns the string directly.
|
||||||
func (m *TextMarshaler) Text(pb Message) string {
|
func (tm *TextMarshaler) Text(pb Message) string {
|
||||||
var buf bytes.Buffer
|
var buf bytes.Buffer
|
||||||
m.Marshal(&buf, pb)
|
tm.Marshal(&buf, pb)
|
||||||
return buf.String()
|
return buf.String()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
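Any expansion in text.go above is opt-in through the new ExpandAny field on TextMarshaler. A minimal usage sketch, assuming the gogo import path and a message value that contains a google.protobuf.Any field:

    package example

    import (
        "bytes"
        "fmt"

        "github.com/gogo/protobuf/proto"
    )

    func printExpanded(msg proto.Message) {
        // With ExpandAny set, Any payloads whose types are linked in are printed
        // inline as [type.googleapis.com/pkg.Msg] <...> instead of raw bytes.
        tm := proto.TextMarshaler{ExpandAny: true}
        var buf bytes.Buffer
        if err := tm.Marshal(&buf, msg); err != nil {
            fmt.Println("marshal error:", err)
            return
        }
        fmt.Print(buf.String())
    }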
6
vendor/github.com/gogo/protobuf/proto/text_gogo.go
generated
vendored
|
@ -33,10 +33,10 @@ import (
|
||||||
"reflect"
|
"reflect"
|
||||||
)
|
)
|
||||||
|
|
||||||
func writeEnum(w *textWriter, v reflect.Value, props *Properties) error {
|
func (tm *TextMarshaler) writeEnum(w *textWriter, v reflect.Value, props *Properties) error {
|
||||||
m, ok := enumStringMaps[props.Enum]
|
m, ok := enumStringMaps[props.Enum]
|
||||||
if !ok {
|
if !ok {
|
||||||
if err := writeAny(w, v, props); err != nil {
|
if err := tm.writeAny(w, v, props); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -48,7 +48,7 @@ func writeEnum(w *textWriter, v reflect.Value, props *Properties) error {
|
||||||
}
|
}
|
||||||
s, ok := m[key]
|
s, ok := m[key]
|
||||||
if !ok {
|
if !ok {
|
||||||
if err := writeAny(w, v, props); err != nil {
|
if err := tm.writeAny(w, v, props); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
189
vendor/github.com/gogo/protobuf/proto/text_parser.go
generated
vendored
|
@ -46,9 +46,13 @@ import (
|
||||||
"reflect"
|
"reflect"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
"time"
|
||||||
"unicode/utf8"
|
"unicode/utf8"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// Error string emitted when deserializing Any and fields are already set
|
||||||
|
const anyRepeatedlyUnpacked = "Any message unpacked multiple times, or %q already set"
|
||||||
|
|
||||||
type ParseError struct {
|
type ParseError struct {
|
||||||
Message string
|
Message string
|
||||||
Line int // 1-based line number
|
Line int // 1-based line number
|
||||||
|
@ -168,7 +172,7 @@ func (p *textParser) advance() {
|
||||||
p.cur.offset, p.cur.line = p.offset, p.line
|
p.cur.offset, p.cur.line = p.offset, p.line
|
||||||
p.cur.unquoted = ""
|
p.cur.unquoted = ""
|
||||||
switch p.s[0] {
|
switch p.s[0] {
|
||||||
case '<', '>', '{', '}', ':', '[', ']', ';', ',':
|
case '<', '>', '{', '}', ':', '[', ']', ';', ',', '/':
|
||||||
// Single symbol
|
// Single symbol
|
||||||
p.cur.value, p.s = p.s[0:1], p.s[1:len(p.s)]
|
p.cur.value, p.s = p.s[0:1], p.s[1:len(p.s)]
|
||||||
case '"', '\'':
|
case '"', '\'':
|
||||||
|
@ -456,7 +460,10 @@ func (p *textParser) readStruct(sv reflect.Value, terminator string) error {
|
||||||
fieldSet := make(map[string]bool)
|
fieldSet := make(map[string]bool)
|
||||||
// A struct is a sequence of "name: value", terminated by one of
|
// A struct is a sequence of "name: value", terminated by one of
|
||||||
// '>' or '}', or the end of the input. A name may also be
|
// '>' or '}', or the end of the input. A name may also be
|
||||||
// "[extension]".
|
// "[extension]" or "[type/url]".
|
||||||
|
//
|
||||||
|
// The whole struct can also be an expanded Any message, like:
|
||||||
|
// [type/url] < ... struct contents ... >
|
||||||
for {
|
for {
|
||||||
tok := p.next()
|
tok := p.next()
|
||||||
if tok.err != nil {
|
if tok.err != nil {
|
||||||
|
@ -466,33 +473,74 @@ func (p *textParser) readStruct(sv reflect.Value, terminator string) error {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
if tok.value == "[" {
|
if tok.value == "[" {
|
||||||
// Looks like an extension.
|
// Looks like an extension or an Any.
|
||||||
//
|
//
|
||||||
// TODO: Check whether we need to handle
|
// TODO: Check whether we need to handle
|
||||||
// namespace rooted names (e.g. ".something.Foo").
|
// namespace rooted names (e.g. ".something.Foo").
|
||||||
|
extName, err := p.consumeExtName()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if s := strings.LastIndex(extName, "/"); s >= 0 {
|
||||||
|
// If it contains a slash, it's an Any type URL.
|
||||||
|
messageName := extName[s+1:]
|
||||||
|
mt := MessageType(messageName)
|
||||||
|
if mt == nil {
|
||||||
|
return p.errorf("unrecognized message %q in google.protobuf.Any", messageName)
|
||||||
|
}
|
||||||
tok = p.next()
|
tok = p.next()
|
||||||
if tok.err != nil {
|
if tok.err != nil {
|
||||||
return tok.err
|
return tok.err
|
||||||
}
|
}
|
||||||
|
// consume an optional colon
|
||||||
|
if tok.value == ":" {
|
||||||
|
tok = p.next()
|
||||||
|
if tok.err != nil {
|
||||||
|
return tok.err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
var terminator string
|
||||||
|
switch tok.value {
|
||||||
|
case "<":
|
||||||
|
terminator = ">"
|
||||||
|
case "{":
|
||||||
|
terminator = "}"
|
||||||
|
default:
|
||||||
|
return p.errorf("expected '{' or '<', found %q", tok.value)
|
||||||
|
}
|
||||||
|
v := reflect.New(mt.Elem())
|
||||||
|
if pe := p.readStruct(v.Elem(), terminator); pe != nil {
|
||||||
|
return pe
|
||||||
|
}
|
||||||
|
b, err := Marshal(v.Interface().(Message))
|
||||||
|
if err != nil {
|
||||||
|
return p.errorf("failed to marshal message of type %q: %v", messageName, err)
|
||||||
|
}
|
||||||
|
if fieldSet["type_url"] {
|
||||||
|
return p.errorf(anyRepeatedlyUnpacked, "type_url")
|
||||||
|
}
|
||||||
|
if fieldSet["value"] {
|
||||||
|
return p.errorf(anyRepeatedlyUnpacked, "value")
|
||||||
|
}
|
||||||
|
sv.FieldByName("TypeUrl").SetString(extName)
|
||||||
|
sv.FieldByName("Value").SetBytes(b)
|
||||||
|
fieldSet["type_url"] = true
|
||||||
|
fieldSet["value"] = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
var desc *ExtensionDesc
|
var desc *ExtensionDesc
|
||||||
// This could be faster, but it's functional.
|
// This could be faster, but it's functional.
|
||||||
// TODO: Do something smarter than a linear scan.
|
// TODO: Do something smarter than a linear scan.
|
||||||
for _, d := range RegisteredExtensions(reflect.New(st).Interface().(Message)) {
|
for _, d := range RegisteredExtensions(reflect.New(st).Interface().(Message)) {
|
||||||
if d.Name == tok.value {
|
if d.Name == extName {
|
||||||
desc = d
|
desc = d
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if desc == nil {
|
if desc == nil {
|
||||||
return p.errorf("unrecognized extension %q", tok.value)
|
return p.errorf("unrecognized extension %q", extName)
|
||||||
}
|
|
||||||
// Check the extension terminator.
|
|
||||||
tok = p.next()
|
|
||||||
if tok.err != nil {
|
|
||||||
return tok.err
|
|
||||||
}
|
|
||||||
if tok.value != "]" {
|
|
||||||
return p.errorf("unrecognized extension terminator %q", tok.value)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
props := &Properties{}
|
props := &Properties{}
|
||||||
|
@ -550,7 +598,11 @@ func (p *textParser) readStruct(sv reflect.Value, terminator string) error {
|
||||||
props = oop.Prop
|
props = oop.Prop
|
||||||
nv := reflect.New(oop.Type.Elem())
|
nv := reflect.New(oop.Type.Elem())
|
||||||
dst = nv.Elem().Field(0)
|
dst = nv.Elem().Field(0)
|
||||||
sv.Field(oop.Field).Set(nv)
|
field := sv.Field(oop.Field)
|
||||||
|
if !field.IsNil() {
|
||||||
|
return p.errorf("field '%s' would overwrite already parsed oneof '%s'", name, sv.Type().Field(oop.Field).Name)
|
||||||
|
}
|
||||||
|
field.Set(nv)
|
||||||
}
|
}
|
||||||
if !dst.IsValid() {
|
if !dst.IsValid() {
|
||||||
return p.errorf("unknown field name %q in %v", name, st)
|
return p.errorf("unknown field name %q in %v", name, st)
|
||||||
|
@ -657,6 +709,35 @@ func (p *textParser) readStruct(sv reflect.Value, terminator string) error {
|
||||||
return reqFieldErr
|
return reqFieldErr
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// consumeExtName consumes extension name or expanded Any type URL and the
|
||||||
|
// following ']'. It returns the name or URL consumed.
|
||||||
|
func (p *textParser) consumeExtName() (string, error) {
|
||||||
|
tok := p.next()
|
||||||
|
if tok.err != nil {
|
||||||
|
return "", tok.err
|
||||||
|
}
|
||||||
|
|
||||||
|
// If extension name or type url is quoted, it's a single token.
|
||||||
|
if len(tok.value) > 2 && isQuote(tok.value[0]) && tok.value[len(tok.value)-1] == tok.value[0] {
|
||||||
|
name, err := unquoteC(tok.value[1:len(tok.value)-1], rune(tok.value[0]))
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return name, p.consumeToken("]")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Consume everything up to "]"
|
||||||
|
var parts []string
|
||||||
|
for tok.value != "]" {
|
||||||
|
parts = append(parts, tok.value)
|
||||||
|
tok = p.next()
|
||||||
|
if tok.err != nil {
|
||||||
|
return "", p.errorf("unrecognized type_url or extension name: %s", tok.err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return strings.Join(parts, ""), nil
|
||||||
|
}
|
||||||
|
|
||||||
// consumeOptionalSeparator consumes an optional semicolon or comma.
|
// consumeOptionalSeparator consumes an optional semicolon or comma.
|
||||||
// It is used in readStruct to provide backward compatibility.
|
// It is used in readStruct to provide backward compatibility.
|
||||||
func (p *textParser) consumeOptionalSeparator() error {
|
func (p *textParser) consumeOptionalSeparator() error {
|
||||||
|
@ -717,6 +798,80 @@ func (p *textParser) readAny(v reflect.Value, props *Properties) error {
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
if props.StdTime {
|
||||||
|
fv := v
|
||||||
|
p.back()
|
||||||
|
props.StdTime = false
|
||||||
|
tproto := &timestamp{}
|
||||||
|
err := p.readAny(reflect.ValueOf(tproto).Elem(), props)
|
||||||
|
props.StdTime = true
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
tim, err := timestampFromProto(tproto)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if props.Repeated {
|
||||||
|
t := reflect.TypeOf(v.Interface())
|
||||||
|
if t.Kind() == reflect.Slice {
|
||||||
|
if t.Elem().Kind() == reflect.Ptr {
|
||||||
|
ts := fv.Interface().([]*time.Time)
|
||||||
|
ts = append(ts, &tim)
|
||||||
|
fv.Set(reflect.ValueOf(ts))
|
||||||
|
return nil
|
||||||
|
} else {
|
||||||
|
ts := fv.Interface().([]time.Time)
|
||||||
|
ts = append(ts, tim)
|
||||||
|
fv.Set(reflect.ValueOf(ts))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if reflect.TypeOf(v.Interface()).Kind() == reflect.Ptr {
|
||||||
|
v.Set(reflect.ValueOf(&tim))
|
||||||
|
} else {
|
||||||
|
v.Set(reflect.Indirect(reflect.ValueOf(&tim)))
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if props.StdDuration {
|
||||||
|
fv := v
|
||||||
|
p.back()
|
||||||
|
props.StdDuration = false
|
||||||
|
dproto := &duration{}
|
||||||
|
err := p.readAny(reflect.ValueOf(dproto).Elem(), props)
|
||||||
|
props.StdDuration = true
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
dur, err := durationFromProto(dproto)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if props.Repeated {
|
||||||
|
t := reflect.TypeOf(v.Interface())
|
||||||
|
if t.Kind() == reflect.Slice {
|
||||||
|
if t.Elem().Kind() == reflect.Ptr {
|
||||||
|
ds := fv.Interface().([]*time.Duration)
|
||||||
|
ds = append(ds, &dur)
|
||||||
|
fv.Set(reflect.ValueOf(ds))
|
||||||
|
return nil
|
||||||
|
} else {
|
||||||
|
ds := fv.Interface().([]time.Duration)
|
||||||
|
ds = append(ds, dur)
|
||||||
|
fv.Set(reflect.ValueOf(ds))
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if reflect.TypeOf(v.Interface()).Kind() == reflect.Ptr {
|
||||||
|
v.Set(reflect.ValueOf(&dur))
|
||||||
|
} else {
|
||||||
|
v.Set(reflect.Indirect(reflect.ValueOf(&dur)))
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
switch fv := v; fv.Kind() {
|
switch fv := v; fv.Kind() {
|
||||||
case reflect.Slice:
|
case reflect.Slice:
|
||||||
at := v.Type()
|
at := v.Type()
|
||||||
|
@ -759,12 +914,12 @@ func (p *textParser) readAny(v reflect.Value, props *Properties) error {
|
||||||
fv.Set(reflect.Append(fv, reflect.New(at.Elem()).Elem()))
|
fv.Set(reflect.Append(fv, reflect.New(at.Elem()).Elem()))
|
||||||
return p.readAny(fv.Index(fv.Len()-1), props)
|
return p.readAny(fv.Index(fv.Len()-1), props)
|
||||||
case reflect.Bool:
|
case reflect.Bool:
|
||||||
// Either "true", "false", 1 or 0.
|
// true/1/t/True or false/f/0/False.
|
||||||
switch tok.value {
|
switch tok.value {
|
||||||
case "true", "1":
|
case "true", "1", "t", "True":
|
||||||
fv.SetBool(true)
|
fv.SetBool(true)
|
||||||
return nil
|
return nil
|
||||||
case "false", "0":
|
case "false", "0", "f", "False":
|
||||||
fv.SetBool(false)
|
fv.SetBool(false)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
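The readStruct and consumeExtName additions above accept the same expanded form on input: a bracketed type URL followed by the nested struct. A small sketch, assuming the gogo import paths and that the inner type (here google.protobuf.Duration from the types package) is linked in so MessageType can resolve the URL:

    package main

    import (
        "fmt"

        "github.com/gogo/protobuf/proto"
        "github.com/gogo/protobuf/types"
    )

    func main() {
        // The parser resolves the type URL, reads the inner struct, re-marshals
        // it, and stores the result in the target's TypeUrl/Value fields.
        txt := `[type.googleapis.com/google.protobuf.Duration] < seconds: 1 nanos: 212000000 >`
        var any types.Any
        if err := proto.UnmarshalText(txt, &any); err != nil {
            panic(err)
        }
        fmt.Println(any.TypeUrl, len(any.Value))
    }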
113
vendor/github.com/gogo/protobuf/proto/timestamp.go
generated
vendored
Normal file
|
@ -0,0 +1,113 @@
|
||||||
|
// Go support for Protocol Buffers - Google's data interchange format
|
||||||
|
//
|
||||||
|
// Copyright 2016 The Go Authors. All rights reserved.
|
||||||
|
// https://github.com/golang/protobuf
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
package proto
|
||||||
|
|
||||||
|
// This file implements operations on google.protobuf.Timestamp.
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
// Seconds field of the earliest valid Timestamp.
|
||||||
|
// This is time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC).Unix().
|
||||||
|
minValidSeconds = -62135596800
|
||||||
|
// Seconds field just after the latest valid Timestamp.
|
||||||
|
// This is time.Date(10000, 1, 1, 0, 0, 0, 0, time.UTC).Unix().
|
||||||
|
maxValidSeconds = 253402300800
|
||||||
|
)
|
||||||
|
|
||||||
|
// validateTimestamp determines whether a Timestamp is valid.
|
||||||
|
// A valid timestamp represents a time in the range
|
||||||
|
// [0001-01-01, 10000-01-01) and has a Nanos field
|
||||||
|
// in the range [0, 1e9).
|
||||||
|
//
|
||||||
|
// If the Timestamp is valid, validateTimestamp returns nil.
|
||||||
|
// Otherwise, it returns an error that describes
|
||||||
|
// the problem.
|
||||||
|
//
|
||||||
|
// Every valid Timestamp can be represented by a time.Time, but the converse is not true.
|
||||||
|
func validateTimestamp(ts *timestamp) error {
|
||||||
|
if ts == nil {
|
||||||
|
return errors.New("timestamp: nil Timestamp")
|
||||||
|
}
|
||||||
|
if ts.Seconds < minValidSeconds {
|
||||||
|
return fmt.Errorf("timestamp: %#v before 0001-01-01", ts)
|
||||||
|
}
|
||||||
|
if ts.Seconds >= maxValidSeconds {
|
||||||
|
return fmt.Errorf("timestamp: %#v after 10000-01-01", ts)
|
||||||
|
}
|
||||||
|
if ts.Nanos < 0 || ts.Nanos >= 1e9 {
|
||||||
|
return fmt.Errorf("timestamp: %#v: nanos not in range [0, 1e9)", ts)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// TimestampFromProto converts a google.protobuf.Timestamp proto to a time.Time.
|
||||||
|
// It returns an error if the argument is invalid.
|
||||||
|
//
|
||||||
|
// Unlike most Go functions, if Timestamp returns an error, the first return value
|
||||||
|
// is not the zero time.Time. Instead, it is the value obtained from the
|
||||||
|
// time.Unix function when passed the contents of the Timestamp, in the UTC
|
||||||
|
// locale. This may or may not be a meaningful time; many invalid Timestamps
|
||||||
|
// do map to valid time.Times.
|
||||||
|
//
|
||||||
|
// A nil Timestamp returns an error. The first return value in that case is
|
||||||
|
// undefined.
|
||||||
|
func timestampFromProto(ts *timestamp) (time.Time, error) {
|
||||||
|
// Don't return the zero value on error, because it corresponds to a valid
|
||||||
|
// timestamp. Instead return whatever time.Unix gives us.
|
||||||
|
var t time.Time
|
||||||
|
if ts == nil {
|
||||||
|
t = time.Unix(0, 0).UTC() // treat nil like the empty Timestamp
|
||||||
|
} else {
|
||||||
|
t = time.Unix(ts.Seconds, int64(ts.Nanos)).UTC()
|
||||||
|
}
|
||||||
|
return t, validateTimestamp(ts)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TimestampProto converts the time.Time to a google.protobuf.Timestamp proto.
|
||||||
|
// It returns an error if the resulting Timestamp is invalid.
|
||||||
|
func timestampProto(t time.Time) (*timestamp, error) {
|
||||||
|
seconds := t.Unix()
|
||||||
|
nanos := int32(t.Sub(time.Unix(seconds, 0)))
|
||||||
|
ts := &timestamp{
|
||||||
|
Seconds: seconds,
|
||||||
|
Nanos: nanos,
|
||||||
|
}
|
||||||
|
if err := validateTimestamp(ts); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return ts, nil
|
||||||
|
}
|
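timestampProto above splits a time.Time into whole seconds since the Unix epoch plus a nanosecond remainder in [0, 1e9), and validateTimestamp bounds it to [0001-01-01, 10000-01-01). A small runnable sketch of that arithmetic:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        t := time.Date(2017, 9, 1, 12, 0, 0, 250000000, time.UTC)
        seconds := t.Unix()                          // 1504267200
        nanos := int32(t.Sub(time.Unix(seconds, 0))) // 250000000
        fmt.Println(seconds, nanos)
    }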
229
vendor/github.com/gogo/protobuf/proto/timestamp_gogo.go
generated
vendored
Normal file
|
@ -0,0 +1,229 @@
|
||||||
|
// Protocol Buffers for Go with Gadgets
|
||||||
|
//
|
||||||
|
// Copyright (c) 2016, The GoGo Authors. All rights reserved.
|
||||||
|
// http://github.com/gogo/protobuf
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
package proto
|
||||||
|
|
||||||
|
import (
|
||||||
|
"reflect"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
var timeType = reflect.TypeOf((*time.Time)(nil)).Elem()
|
||||||
|
|
||||||
|
type timestamp struct {
|
||||||
|
Seconds int64 `protobuf:"varint,1,opt,name=seconds,proto3" json:"seconds,omitempty"`
|
||||||
|
Nanos int32 `protobuf:"varint,2,opt,name=nanos,proto3" json:"nanos,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *timestamp) Reset() { *m = timestamp{} }
|
||||||
|
func (*timestamp) ProtoMessage() {}
|
||||||
|
func (*timestamp) String() string { return "timestamp<string>" }
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
RegisterType((*timestamp)(nil), "gogo.protobuf.proto.timestamp")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *Buffer) decTimestamp() (time.Time, error) {
|
||||||
|
b, err := o.DecodeRawBytes(true)
|
||||||
|
if err != nil {
|
||||||
|
return time.Time{}, err
|
||||||
|
}
|
||||||
|
tproto := &timestamp{}
|
||||||
|
if err := Unmarshal(b, tproto); err != nil {
|
||||||
|
return time.Time{}, err
|
||||||
|
}
|
||||||
|
return timestampFromProto(tproto)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *Buffer) dec_time(p *Properties, base structPointer) error {
|
||||||
|
t, err := o.decTimestamp()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
setPtrCustomType(base, p.field, &t)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *Buffer) dec_ref_time(p *Properties, base structPointer) error {
|
||||||
|
t, err := o.decTimestamp()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
setCustomType(base, p.field, &t)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *Buffer) dec_slice_time(p *Properties, base structPointer) error {
|
||||||
|
t, err := o.decTimestamp()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
newBas := appendStructPointer(base, p.field, reflect.SliceOf(reflect.PtrTo(timeType)))
|
||||||
|
var zero field
|
||||||
|
setPtrCustomType(newBas, zero, &t)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *Buffer) dec_slice_ref_time(p *Properties, base structPointer) error {
|
||||||
|
t, err := o.decTimestamp()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
newBas := appendStructPointer(base, p.field, reflect.SliceOf(timeType))
|
||||||
|
var zero field
|
||||||
|
setCustomType(newBas, zero, &t)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func size_time(p *Properties, base structPointer) (n int) {
|
||||||
|
structp := structPointer_GetStructPointer(base, p.field)
|
||||||
|
if structPointer_IsNil(structp) {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
tim := structPointer_Interface(structp, timeType).(*time.Time)
|
||||||
|
t, err := timestampProto(*tim)
|
||||||
|
if err != nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
size := Size(t)
|
||||||
|
return size + sizeVarint(uint64(size)) + len(p.tagcode)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *Buffer) enc_time(p *Properties, base structPointer) error {
|
||||||
|
structp := structPointer_GetStructPointer(base, p.field)
|
||||||
|
if structPointer_IsNil(structp) {
|
||||||
|
return ErrNil
|
||||||
|
}
|
||||||
|
tim := structPointer_Interface(structp, timeType).(*time.Time)
|
||||||
|
t, err := timestampProto(*tim)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
data, err := Marshal(t)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
o.buf = append(o.buf, p.tagcode...)
|
||||||
|
o.EncodeRawBytes(data)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func size_ref_time(p *Properties, base structPointer) (n int) {
|
||||||
|
tim := structPointer_InterfaceAt(base, p.field, timeType).(*time.Time)
|
||||||
|
t, err := timestampProto(*tim)
|
||||||
|
if err != nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
size := Size(t)
|
||||||
|
return size + sizeVarint(uint64(size)) + len(p.tagcode)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *Buffer) enc_ref_time(p *Properties, base structPointer) error {
|
||||||
|
tim := structPointer_InterfaceAt(base, p.field, timeType).(*time.Time)
|
||||||
|
t, err := timestampProto(*tim)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
data, err := Marshal(t)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
o.buf = append(o.buf, p.tagcode...)
|
||||||
|
o.EncodeRawBytes(data)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func size_slice_time(p *Properties, base structPointer) (n int) {
|
||||||
|
ptims := structPointer_InterfaceAt(base, p.field, reflect.SliceOf(reflect.PtrTo(timeType))).(*[]*time.Time)
|
||||||
|
tims := *ptims
|
||||||
|
for i := 0; i < len(tims); i++ {
|
||||||
|
if tims[i] == nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
tproto, err := timestampProto(*tims[i])
|
||||||
|
if err != nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
size := Size(tproto)
|
||||||
|
n += len(p.tagcode) + size + sizeVarint(uint64(size))
|
||||||
|
}
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *Buffer) enc_slice_time(p *Properties, base structPointer) error {
|
||||||
|
ptims := structPointer_InterfaceAt(base, p.field, reflect.SliceOf(reflect.PtrTo(timeType))).(*[]*time.Time)
|
||||||
|
tims := *ptims
|
||||||
|
for i := 0; i < len(tims); i++ {
|
||||||
|
if tims[i] == nil {
|
||||||
|
return errRepeatedHasNil
|
||||||
|
}
|
||||||
|
tproto, err := timestampProto(*tims[i])
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
data, err := Marshal(tproto)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
o.buf = append(o.buf, p.tagcode...)
|
||||||
|
o.EncodeRawBytes(data)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func size_slice_ref_time(p *Properties, base structPointer) (n int) {
|
||||||
|
ptims := structPointer_InterfaceAt(base, p.field, reflect.SliceOf(timeType)).(*[]time.Time)
|
||||||
|
tims := *ptims
|
||||||
|
for i := 0; i < len(tims); i++ {
|
||||||
|
tproto, err := timestampProto(tims[i])
|
||||||
|
if err != nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
size := Size(tproto)
|
||||||
|
n += len(p.tagcode) + size + sizeVarint(uint64(size))
|
||||||
|
}
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
func (o *Buffer) enc_slice_ref_time(p *Properties, base structPointer) error {
|
||||||
|
ptims := structPointer_InterfaceAt(base, p.field, reflect.SliceOf(timeType)).(*[]time.Time)
|
||||||
|
tims := *ptims
|
||||||
|
for i := 0; i < len(tims); i++ {
|
||||||
|
tproto, err := timestampProto(tims[i])
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
data, err := Marshal(tproto)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
o.buf = append(o.buf, p.tagcode...)
|
||||||
|
o.EncodeRawBytes(data)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
139
vendor/github.com/gogo/protobuf/protobuf/google/protobuf/any.proto
generated
vendored
Normal file
|
@ -0,0 +1,139 @@
|
||||||
|
// Protocol Buffers - Google's data interchange format
|
||||||
|
// Copyright 2008 Google Inc. All rights reserved.
|
||||||
|
// https://developers.google.com/protocol-buffers/
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
syntax = "proto3";
|
||||||
|
|
||||||
|
package google.protobuf;
|
||||||
|
|
||||||
|
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||||
|
option go_package = "types";
|
||||||
|
option java_package = "com.google.protobuf";
|
||||||
|
option java_outer_classname = "AnyProto";
|
||||||
|
option java_multiple_files = true;
|
||||||
|
option objc_class_prefix = "GPB";
|
||||||
|
|
||||||
|
// `Any` contains an arbitrary serialized protocol buffer message along with a
|
||||||
|
// URL that describes the type of the serialized message.
|
||||||
|
//
|
||||||
|
// Protobuf library provides support to pack/unpack Any values in the form
|
||||||
|
// of utility functions or additional generated methods of the Any type.
|
||||||
|
//
|
||||||
|
// Example 1: Pack and unpack a message in C++.
|
||||||
|
//
|
||||||
|
// Foo foo = ...;
|
||||||
|
// Any any;
|
||||||
|
// any.PackFrom(foo);
|
||||||
|
// ...
|
||||||
|
// if (any.UnpackTo(&foo)) {
|
||||||
|
// ...
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Example 2: Pack and unpack a message in Java.
|
||||||
|
//
|
||||||
|
// Foo foo = ...;
|
||||||
|
// Any any = Any.pack(foo);
|
||||||
|
// ...
|
||||||
|
// if (any.is(Foo.class)) {
|
||||||
|
// foo = any.unpack(Foo.class);
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Example 3: Pack and unpack a message in Python.
|
||||||
|
//
|
||||||
|
// foo = Foo(...)
|
||||||
|
// any = Any()
|
||||||
|
// any.Pack(foo)
|
||||||
|
// ...
|
||||||
|
// if any.Is(Foo.DESCRIPTOR):
|
||||||
|
// any.Unpack(foo)
|
||||||
|
// ...
|
||||||
|
//
|
||||||
|
// The pack methods provided by protobuf library will by default use
|
||||||
|
// 'type.googleapis.com/full.type.name' as the type URL and the unpack
|
||||||
|
// methods only use the fully qualified type name after the last '/'
|
||||||
|
// in the type URL, for example "foo.bar.com/x/y.z" will yield type
|
||||||
|
// name "y.z".
|
||||||
|
//
|
||||||
|
//
|
||||||
|
// JSON
|
||||||
|
// ====
|
||||||
|
// The JSON representation of an `Any` value uses the regular
|
||||||
|
// representation of the deserialized, embedded message, with an
|
||||||
|
// additional field `@type` which contains the type URL. Example:
|
||||||
|
//
|
||||||
|
// package google.profile;
|
||||||
|
// message Person {
|
||||||
|
// string first_name = 1;
|
||||||
|
// string last_name = 2;
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// {
|
||||||
|
// "@type": "type.googleapis.com/google.profile.Person",
|
||||||
|
// "firstName": <string>,
|
||||||
|
// "lastName": <string>
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// If the embedded message type is well-known and has a custom JSON
|
||||||
|
// representation, that representation will be embedded adding a field
|
||||||
|
// `value` which holds the custom JSON in addition to the `@type`
|
||||||
|
// field. Example (for message [google.protobuf.Duration][]):
|
||||||
|
//
|
||||||
|
// {
|
||||||
|
// "@type": "type.googleapis.com/google.protobuf.Duration",
|
||||||
|
// "value": "1.212s"
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
message Any {
|
||||||
|
// A URL/resource name whose content describes the type of the
|
||||||
|
// serialized protocol buffer message.
|
||||||
|
//
|
||||||
|
// For URLs which use the scheme `http`, `https`, or no scheme, the
|
||||||
|
// following restrictions and interpretations apply:
|
||||||
|
//
|
||||||
|
// * If no scheme is provided, `https` is assumed.
|
||||||
|
// * The last segment of the URL's path must represent the fully
|
||||||
|
// qualified name of the type (as in `path/google.protobuf.Duration`).
|
||||||
|
// The name should be in a canonical form (e.g., leading "." is
|
||||||
|
// not accepted).
|
||||||
|
// * An HTTP GET on the URL must yield a [google.protobuf.Type][]
|
||||||
|
// value in binary format, or produce an error.
|
||||||
|
// * Applications are allowed to cache lookup results based on the
|
||||||
|
// URL, or have them precompiled into a binary to avoid any
|
||||||
|
// lookup. Therefore, binary compatibility needs to be preserved
|
||||||
|
// on changes to types. (Use versioned type names to manage
|
||||||
|
// breaking changes.)
|
||||||
|
//
|
||||||
|
// Schemes other than `http`, `https` (or the empty scheme) might be
|
||||||
|
// used with implementation specific semantics.
|
||||||
|
//
|
||||||
|
string type_url = 1;
|
||||||
|
|
||||||
|
// Must be a valid serialized protocol buffer of the above specified type.
|
||||||
|
bytes value = 2;
|
||||||
|
}
|
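Packing an Any by hand is just the two fields any.proto defines: the serialized inner message and its type URL. A short sketch, assuming the gogo-generated types package is available at the usual import path:

    package main

    import (
        "fmt"

        "github.com/gogo/protobuf/proto"
        "github.com/gogo/protobuf/types"
    )

    func main() {
        inner := &types.Duration{Seconds: 1, Nanos: 212000000}
        b, err := proto.Marshal(inner)
        if err != nil {
            panic(err)
        }
        // type_url follows the 'type.googleapis.com/full.type.name' convention
        // described in the comments above.
        any := &types.Any{TypeUrl: "type.googleapis.com/google.protobuf.Duration", Value: b}
        fmt.Println(any.TypeUrl, len(any.Value))
    }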
13
vendor/github.com/gogo/protobuf/protobuf/google/protobuf/compiler/plugin.proto
generated
vendored
|
@ -53,6 +53,16 @@ option go_package = "plugin_go";
|
||||||
|
|
||||||
import "google/protobuf/descriptor.proto";
|
import "google/protobuf/descriptor.proto";
|
||||||
|
|
||||||
|
// The version number of protocol compiler.
|
||||||
|
message Version {
|
||||||
|
optional int32 major = 1;
|
||||||
|
optional int32 minor = 2;
|
||||||
|
optional int32 patch = 3;
|
||||||
|
// A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
|
||||||
|
// be empty for mainline stable releases.
|
||||||
|
optional string suffix = 4;
|
||||||
|
}
|
||||||
|
|
||||||
// An encoded CodeGeneratorRequest is written to the plugin's stdin.
|
// An encoded CodeGeneratorRequest is written to the plugin's stdin.
|
||||||
message CodeGeneratorRequest {
|
message CodeGeneratorRequest {
|
||||||
// The .proto files that were explicitly listed on the command-line. The
|
// The .proto files that were explicitly listed on the command-line. The
|
||||||
|
@ -75,6 +85,9 @@ message CodeGeneratorRequest {
|
||||||
// is not similarly optimized on protoc's end -- it will store all fields in
|
// is not similarly optimized on protoc's end -- it will store all fields in
|
||||||
// memory at once before sending them to the plugin.
|
// memory at once before sending them to the plugin.
|
||||||
repeated FileDescriptorProto proto_file = 15;
|
repeated FileDescriptorProto proto_file = 15;
|
||||||
|
|
||||||
|
// The version number of protocol compiler.
|
||||||
|
optional Version compiler_version = 3;
|
||||||
}
|
}
|
||||||
|
|
||||||
// The plugin writes an encoded CodeGeneratorResponse to stdout.
|
// The plugin writes an encoded CodeGeneratorResponse to stdout.
|
||||||
|
|
87
vendor/github.com/gogo/protobuf/protobuf/google/protobuf/descriptor.proto
generated
vendored
|
@@ -139,7 +139,11 @@ message FieldDescriptorProto {
     TYPE_FIXED32 = 7;
     TYPE_BOOL = 8;
     TYPE_STRING = 9;
-    TYPE_GROUP = 10;  // Tag-delimited aggregate.
+    // Tag-delimited aggregate.
+    // Group type is deprecated and not supported in proto3. However, Proto3
+    // implementations should still be able to parse the group wire format and
+    // treat group fields as unknown fields.
+    TYPE_GROUP = 10;
     TYPE_MESSAGE = 11;  // Length-delimited aggregate.
 
     // New in version 2.
@@ -157,7 +161,6 @@ message FieldDescriptorProto {
     LABEL_OPTIONAL = 1;
     LABEL_REQUIRED = 2;
     LABEL_REPEATED = 3;
-    // TODO(sanjay): Should we add LABEL_MAP?
   };
 
   optional string name = 1;
@@ -202,6 +205,7 @@ message FieldDescriptorProto {
 // Describes a oneof.
 message OneofDescriptorProto {
   optional string name = 1;
+  optional OneofOptions options = 2;
 }
 
 // Describes an enum type.
@@ -304,19 +308,8 @@ message FileOptions {
   // top-level extensions defined in the file.
   optional bool java_multiple_files = 10 [default=false];
 
-  // If set true, then the Java code generator will generate equals() and
-  // hashCode() methods for all messages defined in the .proto file.
-  // This increases generated code size, potentially substantially for large
-  // protos, which may harm a memory-constrained application.
-  // - In the full runtime this is a speed optimization, as the
-  // AbstractMessage base class includes reflection-based implementations of
-  // these methods.
-  // - In the lite runtime, setting this option changes the semantics of
-  // equals() and hashCode() to more closely match those of the full runtime;
-  // the generated methods compute their results based on field values rather
-  // than object identity. (Implementations should not assume that hashcodes
-  // will be consistent across runtimes or versions of the protocol compiler.)
-  optional bool java_generate_equals_and_hash = 20 [default=false];
+  // This option does nothing.
+  optional bool java_generate_equals_and_hash = 20 [deprecated=true];
 
   // If set true, then the Java2 code generator will generate code that
   // throws an exception whenever an attempt is made to assign a non-UTF-8
@@ -377,15 +370,19 @@ message FileOptions {
   // Namespace for generated classes; defaults to the package.
   optional string csharp_namespace = 37;
 
-  // Whether the nano proto compiler should generate in the deprecated non-nano
-  // suffixed package.
-  optional bool javanano_use_deprecated_package = 38;
+  // By default Swift generators will take the proto package and CamelCase it
+  // replacing '.' with underscore and use that to prefix the types/symbols
+  // defined. When this options is provided, they will use this value instead
+  // to prefix the types/symbols defined.
+  optional string swift_prefix = 39;
 
   // The parser stores options it doesn't recognize here. See above.
   repeated UninterpretedOption uninterpreted_option = 999;
 
   // Clients can define custom options in extensions of this message. See above.
   extensions 1000 to max;
 
+  //reserved 38;
 }
 
 message MessageOptions {
@@ -443,6 +440,9 @@ message MessageOptions {
   // parser.
   optional bool map_entry = 7;
 
+  //reserved 8;  // javalite_serializable
+
+
   // The parser stores options it doesn't recognize here. See above.
   repeated UninterpretedOption uninterpreted_option = 999;
 
@@ -471,7 +471,6 @@ message FieldOptions {
   // false will avoid using packed encoding.
   optional bool packed = 2;
 
-
   // The jstype option determines the JavaScript type used for values of the
   // field. The option is permitted only for 64 bit integral and fixed types
   // (int64, uint64, sint64, fixed64, sfixed64). By default these types are
@@ -512,7 +511,7 @@ message FieldOptions {
   //
   //
   // Note that implementations may choose not to check required fields within
-  // a lazy sub-message. That is, calling IsInitialized() on the outher message
+  // a lazy sub-message. That is, calling IsInitialized() on the outer message
   // may return true even if the inner message has missing required fields.
   // This is necessary because otherwise the inner message would have to be
   // parsed in order to perform the check, defeating the purpose of lazy
@@ -533,6 +532,16 @@ message FieldOptions {
   optional bool weak = 10 [default=false];
 
 
+  // The parser stores options it doesn't recognize here. See above.
+  repeated UninterpretedOption uninterpreted_option = 999;
+
+  // Clients can define custom options in extensions of this message. See above.
+  extensions 1000 to max;
+
+  //reserved 4;  // removed jtype
+}
+
+message OneofOptions {
   // The parser stores options it doesn't recognize here. See above.
   repeated UninterpretedOption uninterpreted_option = 999;
 
@@ -552,6 +561,7 @@ message EnumOptions {
   // is a formalization for deprecating enums.
   optional bool deprecated = 3 [default=false];
 
+
   // The parser stores options it doesn't recognize here. See above.
   repeated UninterpretedOption uninterpreted_option = 999;
 
@@ -606,6 +616,17 @@ message MethodOptions {
   // this is a formalization for deprecating methods.
   optional bool deprecated = 33 [default=false];
 
+  // Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+  // or neither? HTTP based RPC implementation may choose GET verb for safe
+  // methods, and PUT verb for idempotent methods instead of the default POST.
+  enum IdempotencyLevel {
+    IDEMPOTENCY_UNKNOWN = 0;
+    NO_SIDE_EFFECTS = 1; // implies idempotent
+    IDEMPOTENT = 2;      // idempotent, but may have side effects
+  }
+  optional IdempotencyLevel idempotency_level =
+      34 [default=IDEMPOTENCY_UNKNOWN];
+
   // The parser stores options it doesn't recognize here. See above.
   repeated UninterpretedOption uninterpreted_option = 999;
 
@@ -777,3 +798,29 @@ message SourceCodeInfo {
     repeated string leading_detached_comments = 6;
   }
 }
+
+// Describes the relationship between generated code and its original source
+// file. A GeneratedCodeInfo message is associated with only one generated
+// source file, but may contain references to different source .proto files.
+message GeneratedCodeInfo {
+  // An Annotation connects some span of text in generated code to an element
+  // of its generating .proto file.
+  repeated Annotation annotation = 1;
+  message Annotation {
+    // Identifies the element in the original source .proto file. This field
+    // is formatted the same as SourceCodeInfo.Location.path.
+    repeated int32 path = 1 [packed=true];
+
+    // Identifies the filesystem path to the original source .proto.
+    optional string source_file = 2;
+
+    // Identifies the starting offset in bytes in the generated code
+    // that relates to the identified object.
+    optional int32 begin = 3;
+
+    // Identifies the ending offset in bytes in the generated code that
+    // relates to the identified offset. The end offset should be one past
+    // the last relevant byte (so the length of the text = end - begin).
+    optional int32 end = 4;
+  }
+}
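Reviewer note: the new MethodOptions.IdempotencyLevel above lets HTTP-based RPC layers pick GET or PUT instead of the default POST. A small illustrative Go sketch of that mapping follows, using the generated gogo descriptor types; the GetIdempotencyLevel accessor and the IdempotencyLevel struct field are assumed from the usual generated code shape and are not shown in this diff.

package main

import (
	"fmt"

	descriptor "github.com/gogo/protobuf/protoc-gen-gogo/descriptor"
)

// verbFor picks an HTTP verb from a method's idempotency level, following the
// guidance in the MethodOptions comment: safe methods may use GET, idempotent
// ones PUT, everything else the default POST.
func verbFor(opts *descriptor.MethodOptions) string {
	switch opts.GetIdempotencyLevel() {
	case descriptor.MethodOptions_NO_SIDE_EFFECTS:
		return "GET"
	case descriptor.MethodOptions_IDEMPOTENT:
		return "PUT"
	default:
		return "POST"
	}
}

func main() {
	lvl := descriptor.MethodOptions_NO_SIDE_EFFECTS
	fmt.Println(verbFor(&descriptor.MethodOptions{IdempotencyLevel: &lvl})) // GET
	fmt.Println(verbFor(nil))                                               // POST
}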
|
||||||
|
|
vendor/github.com/gogo/protobuf/protobuf/google/protobuf/duration.proto (generated, vendored, new file): 104 added lines
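Reviewer note: the duration.proto vendored below documents, in pseudo code, how to normalize the seconds/nanos pair when subtracting two Timestamps. A minimal Go sketch of that carry rule precedes the file body; it uses plain int64/int32 rather than the generated Duration/Timestamp types so it stays self-contained.

package main

import "fmt"

const nanosPerSecond = 1000000000

// durationBetween mirrors "Example 1" from the comment in duration.proto:
// subtract two (seconds, nanos) timestamps and normalize so that the seconds
// and nanos components end up with the same sign.
func durationBetween(startSec int64, startNanos int32, endSec int64, endNanos int32) (int64, int32) {
	sec := endSec - startSec
	nanos := endNanos - startNanos
	if sec < 0 && nanos > 0 {
		sec++
		nanos -= nanosPerSecond
	} else if sec > 0 && nanos < 0 {
		sec--
		nanos += nanosPerSecond
	}
	return sec, nanos
}

func main() {
	// 10.2s - 9.9s = 0.3s, expressed as seconds=0, nanos=300000000.
	s, n := durationBetween(9, 900000000, 10, 200000000)
	fmt.Println(s, n)
}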
|
||||||
|
// Protocol Buffers - Google's data interchange format
|
||||||
|
// Copyright 2008 Google Inc. All rights reserved.
|
||||||
|
// https://developers.google.com/protocol-buffers/
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
syntax = "proto3";
|
||||||
|
|
||||||
|
package google.protobuf;
|
||||||
|
|
||||||
|
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||||
|
option cc_enable_arenas = true;
|
||||||
|
option go_package = "types";
|
||||||
|
option java_package = "com.google.protobuf";
|
||||||
|
option java_outer_classname = "DurationProto";
|
||||||
|
option java_multiple_files = true;
|
||||||
|
option objc_class_prefix = "GPB";
|
||||||
|
|
||||||
|
// A Duration represents a signed, fixed-length span of time represented
|
||||||
|
// as a count of seconds and fractions of seconds at nanosecond
|
||||||
|
// resolution. It is independent of any calendar and concepts like "day"
|
||||||
|
// or "month". It is related to Timestamp in that the difference between
|
||||||
|
// two Timestamp values is a Duration and it can be added or subtracted
|
||||||
|
// from a Timestamp. Range is approximately +-10,000 years.
|
||||||
|
//
|
||||||
|
// Example 1: Compute Duration from two Timestamps in pseudo code.
|
||||||
|
//
|
||||||
|
// Timestamp start = ...;
|
||||||
|
// Timestamp end = ...;
|
||||||
|
// Duration duration = ...;
|
||||||
|
//
|
||||||
|
// duration.seconds = end.seconds - start.seconds;
|
||||||
|
// duration.nanos = end.nanos - start.nanos;
|
||||||
|
//
|
||||||
|
// if (duration.seconds < 0 && duration.nanos > 0) {
|
||||||
|
// duration.seconds += 1;
|
||||||
|
// duration.nanos -= 1000000000;
|
||||||
|
// } else if (durations.seconds > 0 && duration.nanos < 0) {
|
||||||
|
// duration.seconds -= 1;
|
||||||
|
// duration.nanos += 1000000000;
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
|
||||||
|
//
|
||||||
|
// Timestamp start = ...;
|
||||||
|
// Duration duration = ...;
|
||||||
|
// Timestamp end = ...;
|
||||||
|
//
|
||||||
|
// end.seconds = start.seconds + duration.seconds;
|
||||||
|
// end.nanos = start.nanos + duration.nanos;
|
||||||
|
//
|
||||||
|
// if (end.nanos < 0) {
|
||||||
|
// end.seconds -= 1;
|
||||||
|
// end.nanos += 1000000000;
|
||||||
|
// } else if (end.nanos >= 1000000000) {
|
||||||
|
// end.seconds += 1;
|
||||||
|
// end.nanos -= 1000000000;
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Example 3: Compute Duration from datetime.timedelta in Python.
|
||||||
|
//
|
||||||
|
// td = datetime.timedelta(days=3, minutes=10)
|
||||||
|
// duration = Duration()
|
||||||
|
// duration.FromTimedelta(td)
|
||||||
|
//
|
||||||
|
//
|
||||||
|
message Duration {
|
||||||
|
|
||||||
|
// Signed seconds of the span of time. Must be from -315,576,000,000
|
||||||
|
// to +315,576,000,000 inclusive.
|
||||||
|
int64 seconds = 1;
|
||||||
|
|
||||||
|
// Signed fractions of a second at nanosecond resolution of the span
|
||||||
|
// of time. Durations less than one second are represented with a 0
|
||||||
|
// `seconds` field and a positive or negative `nanos` field. For durations
|
||||||
|
// of one second or more, a non-zero value for the `nanos` field must be
|
||||||
|
// of the same sign as the `seconds` field. Must be from -999,999,999
|
||||||
|
// to +999,999,999 inclusive.
|
||||||
|
int32 nanos = 2;
|
||||||
|
}
|
vendor/github.com/gogo/protobuf/protobuf/google/protobuf/empty.proto (generated, vendored, new file): 52 added lines
|
||||||
|
// Protocol Buffers - Google's data interchange format
|
||||||
|
// Copyright 2008 Google Inc. All rights reserved.
|
||||||
|
// https://developers.google.com/protocol-buffers/
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
syntax = "proto3";
|
||||||
|
|
||||||
|
package google.protobuf;
|
||||||
|
|
||||||
|
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||||
|
option go_package = "types";
|
||||||
|
option java_package = "com.google.protobuf";
|
||||||
|
option java_outer_classname = "EmptyProto";
|
||||||
|
option java_multiple_files = true;
|
||||||
|
option objc_class_prefix = "GPB";
|
||||||
|
option cc_enable_arenas = true;
|
||||||
|
|
||||||
|
// A generic empty message that you can re-use to avoid defining duplicated
|
||||||
|
// empty messages in your APIs. A typical example is to use it as the request
|
||||||
|
// or the response type of an API method. For instance:
|
||||||
|
//
|
||||||
|
// service Foo {
|
||||||
|
// rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// The JSON representation for `Empty` is empty JSON object `{}`.
|
||||||
|
message Empty {}
|
vendor/github.com/gogo/protobuf/protobuf/google/protobuf/field_mask.proto (generated, vendored, new file): 246 added lines
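Reviewer note: the field_mask.proto vendored below spends most of its length on update-operation semantics: only the fields named in the mask's paths may be changed, everything else stays untouched. A tiny Go sketch of that behavior precedes the file body; flat string-to-string maps stand in for real messages, and nested paths such as "f.b.d" are deliberately not handled.

package main

import "fmt"

// applyMask copies only the masked paths from src into dst, which is the
// update behavior described in field_mask.proto: unmasked fields in dst are
// left alone even if src carries values for them.
func applyMask(dst, src map[string]string, paths []string) {
	for _, p := range paths {
		if v, ok := src[p]; ok {
			dst[p] = v
		}
	}
}

func main() {
	current := map[string]string{"display_name": "old", "address": "somewhere"}
	update := map[string]string{"display_name": "new", "address": "ignored"}
	applyMask(current, update, []string{"display_name"})
	fmt.Println(current["display_name"], current["address"]) // new somewhere
}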
|
||||||
|
// Protocol Buffers - Google's data interchange format
|
||||||
|
// Copyright 2008 Google Inc. All rights reserved.
|
||||||
|
// https://developers.google.com/protocol-buffers/
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
syntax = "proto3";
|
||||||
|
|
||||||
|
package google.protobuf;
|
||||||
|
|
||||||
|
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||||
|
option java_package = "com.google.protobuf";
|
||||||
|
option java_outer_classname = "FieldMaskProto";
|
||||||
|
option java_multiple_files = true;
|
||||||
|
option objc_class_prefix = "GPB";
|
||||||
|
option go_package = "types";
|
||||||
|
|
||||||
|
// `FieldMask` represents a set of symbolic field paths, for example:
|
||||||
|
//
|
||||||
|
// paths: "f.a"
|
||||||
|
// paths: "f.b.d"
|
||||||
|
//
|
||||||
|
// Here `f` represents a field in some root message, `a` and `b`
|
||||||
|
// fields in the message found in `f`, and `d` a field found in the
|
||||||
|
// message in `f.b`.
|
||||||
|
//
|
||||||
|
// Field masks are used to specify a subset of fields that should be
|
||||||
|
// returned by a get operation or modified by an update operation.
|
||||||
|
// Field masks also have a custom JSON encoding (see below).
|
||||||
|
//
|
||||||
|
// # Field Masks in Projections
|
||||||
|
//
|
||||||
|
// When used in the context of a projection, a response message or
|
||||||
|
// sub-message is filtered by the API to only contain those fields as
|
||||||
|
// specified in the mask. For example, if the mask in the previous
|
||||||
|
// example is applied to a response message as follows:
|
||||||
|
//
|
||||||
|
// f {
|
||||||
|
// a : 22
|
||||||
|
// b {
|
||||||
|
// d : 1
|
||||||
|
// x : 2
|
||||||
|
// }
|
||||||
|
// y : 13
|
||||||
|
// }
|
||||||
|
// z: 8
|
||||||
|
//
|
||||||
|
// The result will not contain specific values for fields x,y and z
|
||||||
|
// (their value will be set to the default, and omitted in proto text
|
||||||
|
// output):
|
||||||
|
//
|
||||||
|
//
|
||||||
|
// f {
|
||||||
|
// a : 22
|
||||||
|
// b {
|
||||||
|
// d : 1
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// A repeated field is not allowed except at the last position of a
|
||||||
|
// paths string.
|
||||||
|
//
|
||||||
|
// If a FieldMask object is not present in a get operation, the
|
||||||
|
// operation applies to all fields (as if a FieldMask of all fields
|
||||||
|
// had been specified).
|
||||||
|
//
|
||||||
|
// Note that a field mask does not necessarily apply to the
|
||||||
|
// top-level response message. In case of a REST get operation, the
|
||||||
|
// field mask applies directly to the response, but in case of a REST
|
||||||
|
// list operation, the mask instead applies to each individual message
|
||||||
|
// in the returned resource list. In case of a REST custom method,
|
||||||
|
// other definitions may be used. Where the mask applies will be
|
||||||
|
// clearly documented together with its declaration in the API. In
|
||||||
|
// any case, the effect on the returned resource/resources is required
|
||||||
|
// behavior for APIs.
|
||||||
|
//
|
||||||
|
// # Field Masks in Update Operations
|
||||||
|
//
|
||||||
|
// A field mask in update operations specifies which fields of the
|
||||||
|
// targeted resource are going to be updated. The API is required
|
||||||
|
// to only change the values of the fields as specified in the mask
|
||||||
|
// and leave the others untouched. If a resource is passed in to
|
||||||
|
// describe the updated values, the API ignores the values of all
|
||||||
|
// fields not covered by the mask.
|
||||||
|
//
|
||||||
|
// If a repeated field is specified for an update operation, the existing
|
||||||
|
// repeated values in the target resource will be overwritten by the new values.
|
||||||
|
// Note that a repeated field is only allowed in the last position of a `paths`
|
||||||
|
// string.
|
||||||
|
//
|
||||||
|
// If a sub-message is specified in the last position of the field mask for an
|
||||||
|
// update operation, then the existing sub-message in the target resource is
|
||||||
|
// overwritten. Given the target message:
|
||||||
|
//
|
||||||
|
// f {
|
||||||
|
// b {
|
||||||
|
// d : 1
|
||||||
|
// x : 2
|
||||||
|
// }
|
||||||
|
// c : 1
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// And an update message:
|
||||||
|
//
|
||||||
|
// f {
|
||||||
|
// b {
|
||||||
|
// d : 10
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// then if the field mask is:
|
||||||
|
//
|
||||||
|
// paths: "f.b"
|
||||||
|
//
|
||||||
|
// then the result will be:
|
||||||
|
//
|
||||||
|
// f {
|
||||||
|
// b {
|
||||||
|
// d : 10
|
||||||
|
// }
|
||||||
|
// c : 1
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// However, if the update mask was:
|
||||||
|
//
|
||||||
|
// paths: "f.b.d"
|
||||||
|
//
|
||||||
|
// then the result would be:
|
||||||
|
//
|
||||||
|
// f {
|
||||||
|
// b {
|
||||||
|
// d : 10
|
||||||
|
// x : 2
|
||||||
|
// }
|
||||||
|
// c : 1
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// In order to reset a field's value to the default, the field must
|
||||||
|
// be in the mask and set to the default value in the provided resource.
|
||||||
|
// Hence, in order to reset all fields of a resource, provide a default
|
||||||
|
// instance of the resource and set all fields in the mask, or do
|
||||||
|
// not provide a mask as described below.
|
||||||
|
//
|
||||||
|
// If a field mask is not present on update, the operation applies to
|
||||||
|
// all fields (as if a field mask of all fields has been specified).
|
||||||
|
// Note that in the presence of schema evolution, this may mean that
|
||||||
|
// fields the client does not know and has therefore not filled into
|
||||||
|
// the request will be reset to their default. If this is unwanted
|
||||||
|
// behavior, a specific service may require a client to always specify
|
||||||
|
// a field mask, producing an error if not.
|
||||||
|
//
|
||||||
|
// As with get operations, the location of the resource which
|
||||||
|
// describes the updated values in the request message depends on the
|
||||||
|
// operation kind. In any case, the effect of the field mask is
|
||||||
|
// required to be honored by the API.
|
||||||
|
//
|
||||||
|
// ## Considerations for HTTP REST
|
||||||
|
//
|
||||||
|
// The HTTP kind of an update operation which uses a field mask must
|
||||||
|
// be set to PATCH instead of PUT in order to satisfy HTTP semantics
|
||||||
|
// (PUT must only be used for full updates).
|
||||||
|
//
|
||||||
|
// # JSON Encoding of Field Masks
|
||||||
|
//
|
||||||
|
// In JSON, a field mask is encoded as a single string where paths are
|
||||||
|
// separated by a comma. Fields name in each path are converted
|
||||||
|
// to/from lower-camel naming conventions.
|
||||||
|
//
|
||||||
|
// As an example, consider the following message declarations:
|
||||||
|
//
|
||||||
|
// message Profile {
|
||||||
|
// User user = 1;
|
||||||
|
// Photo photo = 2;
|
||||||
|
// }
|
||||||
|
// message User {
|
||||||
|
// string display_name = 1;
|
||||||
|
// string address = 2;
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// In proto a field mask for `Profile` may look as such:
|
||||||
|
//
|
||||||
|
// mask {
|
||||||
|
// paths: "user.display_name"
|
||||||
|
// paths: "photo"
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// In JSON, the same mask is represented as below:
|
||||||
|
//
|
||||||
|
// {
|
||||||
|
// mask: "user.displayName,photo"
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// # Field Masks and Oneof Fields
|
||||||
|
//
|
||||||
|
// Field masks treat fields in oneofs just as regular fields. Consider the
|
||||||
|
// following message:
|
||||||
|
//
|
||||||
|
// message SampleMessage {
|
||||||
|
// oneof test_oneof {
|
||||||
|
// string name = 4;
|
||||||
|
// SubMessage sub_message = 9;
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// The field mask can be:
|
||||||
|
//
|
||||||
|
// mask {
|
||||||
|
// paths: "name"
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Or:
|
||||||
|
//
|
||||||
|
// mask {
|
||||||
|
// paths: "sub_message"
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Note that oneof type names ("test_oneof" in this case) cannot be used in
|
||||||
|
// paths.
|
||||||
|
message FieldMask {
|
||||||
|
// The set of field mask paths.
|
||||||
|
repeated string paths = 1;
|
||||||
|
}
|
vendor/github.com/gogo/protobuf/protobuf/google/protobuf/struct.proto (generated, vendored, new file): 96 added lines
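Reviewer note: struct.proto below defines Struct/Value/ListValue, which mirror a JSON object in proto form. The Go sketch preceding the file body shows one plausible way to build such a value with the gogo "types" package named in the file's go_package option; the Value_StringValue and Value_NumberValue wrapper names are assumptions about the generated oneof code and should be checked against the actual package.

package main

import (
	"fmt"

	"github.com/gogo/protobuf/types"
)

// toStruct builds a google.protobuf.Struct from a small map of strings and
// numbers, the two Value kinds used here; other kinds are skipped.
func toStruct(m map[string]interface{}) *types.Struct {
	fields := make(map[string]*types.Value, len(m))
	for k, v := range m {
		switch v := v.(type) {
		case string:
			fields[k] = &types.Value{Kind: &types.Value_StringValue{StringValue: v}}
		case float64:
			fields[k] = &types.Value{Kind: &types.Value_NumberValue{NumberValue: v}}
		}
	}
	return &types.Struct{Fields: fields}
}

func main() {
	s := toStruct(map[string]interface{}{"name": "cri-o", "replicas": 3.0})
	fmt.Println(len(s.Fields)) // 2
}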
|
||||||
|
// Protocol Buffers - Google's data interchange format
|
||||||
|
// Copyright 2008 Google Inc. All rights reserved.
|
||||||
|
// https://developers.google.com/protocol-buffers/
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
syntax = "proto3";
|
||||||
|
|
||||||
|
package google.protobuf;
|
||||||
|
|
||||||
|
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||||
|
option cc_enable_arenas = true;
|
||||||
|
option go_package = "types";
|
||||||
|
option java_package = "com.google.protobuf";
|
||||||
|
option java_outer_classname = "StructProto";
|
||||||
|
option java_multiple_files = true;
|
||||||
|
option objc_class_prefix = "GPB";
|
||||||
|
|
||||||
|
|
||||||
|
// `Struct` represents a structured data value, consisting of fields
|
||||||
|
// which map to dynamically typed values. In some languages, `Struct`
|
||||||
|
// might be supported by a native representation. For example, in
|
||||||
|
// scripting languages like JS a struct is represented as an
|
||||||
|
// object. The details of that representation are described together
|
||||||
|
// with the proto support for the language.
|
||||||
|
//
|
||||||
|
// The JSON representation for `Struct` is JSON object.
|
||||||
|
message Struct {
|
||||||
|
// Unordered map of dynamically typed values.
|
||||||
|
map<string, Value> fields = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// `Value` represents a dynamically typed value which can be either
|
||||||
|
// null, a number, a string, a boolean, a recursive struct value, or a
|
||||||
|
// list of values. A producer of value is expected to set one of that
|
||||||
|
// variants, absence of any variant indicates an error.
|
||||||
|
//
|
||||||
|
// The JSON representation for `Value` is JSON value.
|
||||||
|
message Value {
|
||||||
|
// The kind of value.
|
||||||
|
oneof kind {
|
||||||
|
// Represents a null value.
|
||||||
|
NullValue null_value = 1;
|
||||||
|
// Represents a double value.
|
||||||
|
double number_value = 2;
|
||||||
|
// Represents a string value.
|
||||||
|
string string_value = 3;
|
||||||
|
// Represents a boolean value.
|
||||||
|
bool bool_value = 4;
|
||||||
|
// Represents a structured value.
|
||||||
|
Struct struct_value = 5;
|
||||||
|
// Represents a repeated `Value`.
|
||||||
|
ListValue list_value = 6;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// `NullValue` is a singleton enumeration to represent the null value for the
|
||||||
|
// `Value` type union.
|
||||||
|
//
|
||||||
|
// The JSON representation for `NullValue` is JSON `null`.
|
||||||
|
enum NullValue {
|
||||||
|
// Null value.
|
||||||
|
NULL_VALUE = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// `ListValue` is a wrapper around a repeated field of values.
|
||||||
|
//
|
||||||
|
// The JSON representation for `ListValue` is JSON array.
|
||||||
|
message ListValue {
|
||||||
|
// Repeated field of dynamically typed values.
|
||||||
|
repeated Value values = 1;
|
||||||
|
}
|
vendor/github.com/gogo/protobuf/protobuf/google/protobuf/timestamp.proto (generated, vendored, new file): 108 added lines
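Reviewer note: timestamp.proto below gives POSIX, Win32, Java, and Python recipes for filling the seconds/nanos pair. The short Go sketch preceding the file body does the same conversion from time.Time, using plain integers rather than the generated Timestamp type so it stays self-contained.

package main

import (
	"fmt"
	"time"
)

// toProtoTime splits a Go time.Time into the seconds/nanos pair that
// google.protobuf.Timestamp carries: seconds since the Unix epoch plus a
// non-negative nanosecond remainder, matching the POSIX examples above.
func toProtoTime(t time.Time) (seconds int64, nanos int32) {
	return t.Unix(), int32(t.Nanosecond())
}

func main() {
	s, n := toProtoTime(time.Now())
	fmt.Printf("seconds=%d nanos=%d\n", s, n)
}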
|
||||||
|
// Protocol Buffers - Google's data interchange format
|
||||||
|
// Copyright 2008 Google Inc. All rights reserved.
|
||||||
|
// https://developers.google.com/protocol-buffers/
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
syntax = "proto3";
|
||||||
|
|
||||||
|
package google.protobuf;
|
||||||
|
|
||||||
|
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||||
|
option cc_enable_arenas = true;
|
||||||
|
option go_package = "types";
|
||||||
|
option java_package = "com.google.protobuf";
|
||||||
|
option java_outer_classname = "TimestampProto";
|
||||||
|
option java_multiple_files = true;
|
||||||
|
option objc_class_prefix = "GPB";
|
||||||
|
|
||||||
|
// A Timestamp represents a point in time independent of any time zone
|
||||||
|
// or calendar, represented as seconds and fractions of seconds at
|
||||||
|
// nanosecond resolution in UTC Epoch time. It is encoded using the
|
||||||
|
// Proleptic Gregorian Calendar which extends the Gregorian calendar
|
||||||
|
// backwards to year one. It is encoded assuming all minutes are 60
|
||||||
|
// seconds long, i.e. leap seconds are "smeared" so that no leap second
|
||||||
|
// table is needed for interpretation. Range is from
|
||||||
|
// 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z.
|
||||||
|
// By restricting to that range, we ensure that we can convert to
|
||||||
|
// and from RFC 3339 date strings.
|
||||||
|
// See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt).
|
||||||
|
//
|
||||||
|
// Example 1: Compute Timestamp from POSIX `time()`.
|
||||||
|
//
|
||||||
|
// Timestamp timestamp;
|
||||||
|
// timestamp.set_seconds(time(NULL));
|
||||||
|
// timestamp.set_nanos(0);
|
||||||
|
//
|
||||||
|
// Example 2: Compute Timestamp from POSIX `gettimeofday()`.
|
||||||
|
//
|
||||||
|
// struct timeval tv;
|
||||||
|
// gettimeofday(&tv, NULL);
|
||||||
|
//
|
||||||
|
// Timestamp timestamp;
|
||||||
|
// timestamp.set_seconds(tv.tv_sec);
|
||||||
|
// timestamp.set_nanos(tv.tv_usec * 1000);
|
||||||
|
//
|
||||||
|
// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
|
||||||
|
//
|
||||||
|
// FILETIME ft;
|
||||||
|
// GetSystemTimeAsFileTime(&ft);
|
||||||
|
// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
|
||||||
|
//
|
||||||
|
// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
|
||||||
|
// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
|
||||||
|
// Timestamp timestamp;
|
||||||
|
// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
|
||||||
|
// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
|
||||||
|
//
|
||||||
|
// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
|
||||||
|
//
|
||||||
|
// long millis = System.currentTimeMillis();
|
||||||
|
//
|
||||||
|
// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
|
||||||
|
// .setNanos((int) ((millis % 1000) * 1000000)).build();
|
||||||
|
//
|
||||||
|
//
|
||||||
|
// Example 5: Compute Timestamp from current time in Python.
|
||||||
|
//
|
||||||
|
// timestamp = Timestamp()
|
||||||
|
// timestamp.GetCurrentTime()
|
||||||
|
//
|
||||||
|
//
|
||||||
|
message Timestamp {
|
||||||
|
|
||||||
|
// Represents seconds of UTC time since Unix epoch
|
||||||
|
// 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
|
||||||
|
// 9999-12-31T23:59:59Z inclusive.
|
||||||
|
int64 seconds = 1;
|
||||||
|
|
||||||
|
// Non-negative fractions of a second at nanosecond resolution. Negative
|
||||||
|
// second values with fractions must still have non-negative nanos values
|
||||||
|
// that count forward in time. Must be from 0 to 999,999,999
|
||||||
|
// inclusive.
|
||||||
|
int32 nanos = 2;
|
||||||
|
}
|
vendor/github.com/gogo/protobuf/protobuf/google/protobuf/wrappers.proto (generated, vendored, new file): 118 added lines
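Reviewer note: the wrapper messages in wrappers.proto below exist so that "field not set" can be told apart from the type's zero value. In Go the same distinction is usually drawn with a pointer; the sketch preceding the file body illustrates the analogy with a plain *int32 (nil means absent, a pointer to zero means explicitly zero).

package main

import "fmt"

// describe reports whether an optional value was provided at all, which is
// exactly what Int32Value and friends make possible inside proto3 messages.
func describe(v *int32) string {
	if v == nil {
		return "unset"
	}
	return fmt.Sprintf("set to %d", *v)
}

func main() {
	var zero int32
	fmt.Println(describe(nil))   // unset
	fmt.Println(describe(&zero)) // set to 0
}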
|
||||||
|
// Protocol Buffers - Google's data interchange format
|
||||||
|
// Copyright 2008 Google Inc. All rights reserved.
|
||||||
|
// https://developers.google.com/protocol-buffers/
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
// Wrappers for primitive (non-message) types. These types are useful
|
||||||
|
// for embedding primitives in the `google.protobuf.Any` type and for places
|
||||||
|
// where we need to distinguish between the absence of a primitive
|
||||||
|
// typed field and its default value.
|
||||||
|
|
||||||
|
syntax = "proto3";
|
||||||
|
|
||||||
|
package google.protobuf;
|
||||||
|
|
||||||
|
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||||
|
option cc_enable_arenas = true;
|
||||||
|
option go_package = "types";
|
||||||
|
option java_package = "com.google.protobuf";
|
||||||
|
option java_outer_classname = "WrappersProto";
|
||||||
|
option java_multiple_files = true;
|
||||||
|
option objc_class_prefix = "GPB";
|
||||||
|
|
||||||
|
// Wrapper message for `double`.
|
||||||
|
//
|
||||||
|
// The JSON representation for `DoubleValue` is JSON number.
|
||||||
|
message DoubleValue {
|
||||||
|
// The double value.
|
||||||
|
double value = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wrapper message for `float`.
|
||||||
|
//
|
||||||
|
// The JSON representation for `FloatValue` is JSON number.
|
||||||
|
message FloatValue {
|
||||||
|
// The float value.
|
||||||
|
float value = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wrapper message for `int64`.
|
||||||
|
//
|
||||||
|
// The JSON representation for `Int64Value` is JSON string.
|
||||||
|
message Int64Value {
|
||||||
|
// The int64 value.
|
||||||
|
int64 value = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wrapper message for `uint64`.
|
||||||
|
//
|
||||||
|
// The JSON representation for `UInt64Value` is JSON string.
|
||||||
|
message UInt64Value {
|
||||||
|
// The uint64 value.
|
||||||
|
uint64 value = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wrapper message for `int32`.
|
||||||
|
//
|
||||||
|
// The JSON representation for `Int32Value` is JSON number.
|
||||||
|
message Int32Value {
|
||||||
|
// The int32 value.
|
||||||
|
int32 value = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wrapper message for `uint32`.
|
||||||
|
//
|
||||||
|
// The JSON representation for `UInt32Value` is JSON number.
|
||||||
|
message UInt32Value {
|
||||||
|
// The uint32 value.
|
||||||
|
uint32 value = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wrapper message for `bool`.
|
||||||
|
//
|
||||||
|
// The JSON representation for `BoolValue` is JSON `true` and `false`.
|
||||||
|
message BoolValue {
|
||||||
|
// The bool value.
|
||||||
|
bool value = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wrapper message for `string`.
|
||||||
|
//
|
||||||
|
// The JSON representation for `StringValue` is JSON string.
|
||||||
|
message StringValue {
|
||||||
|
// The string value.
|
||||||
|
string value = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wrapper message for `bytes`.
|
||||||
|
//
|
||||||
|
// The JSON representation for `BytesValue` is JSON string.
|
||||||
|
message BytesValue {
|
||||||
|
// The bytes value.
|
||||||
|
bytes value = 1;
|
||||||
|
}
|
vendor/github.com/gogo/protobuf/protoc-gen-gogo/descriptor/descriptor.go (generated, vendored, new file): 118 added lines
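Reviewer note: descriptor.go below adds ForMessage, which digs the FileDescriptorProto and DescriptorProto for any gogo-generated message out of its compressed descriptor. A minimal usage sketch precedes the file body; it reuses the package's own DescriptorProto type as the example message, which is an arbitrary choice, and the printed names are only what one would expect rather than output verified here.

package main

import (
	"fmt"

	descriptor "github.com/gogo/protobuf/protoc-gen-gogo/descriptor"
)

func main() {
	// ForMessage works on any generated message that exposes Descriptor().
	fd, md := descriptor.ForMessage(&descriptor.DescriptorProto{})
	fmt.Println(fd.GetName()) // the source .proto file, e.g. "descriptor.proto"
	fmt.Println(md.GetName()) // the message name, e.g. "DescriptorProto"
}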
|
||||||
|
// Go support for Protocol Buffers - Google's data interchange format
|
||||||
|
//
|
||||||
|
// Copyright 2016 The Go Authors. All rights reserved.
|
||||||
|
// https://github.com/golang/protobuf
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
// Package descriptor provides functions for obtaining protocol buffer
|
||||||
|
// descriptors for generated Go types.
|
||||||
|
//
|
||||||
|
// These functions cannot go in package proto because they depend on the
|
||||||
|
// generated protobuf descriptor messages, which themselves depend on proto.
|
||||||
|
package descriptor
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"compress/gzip"
|
||||||
|
"fmt"
|
||||||
|
"io/ioutil"
|
||||||
|
|
||||||
|
"github.com/gogo/protobuf/proto"
|
||||||
|
)
|
||||||
|
|
||||||
|
// extractFile extracts a FileDescriptorProto from a gzip'd buffer.
|
||||||
|
func extractFile(gz []byte) (*FileDescriptorProto, error) {
|
||||||
|
r, err := gzip.NewReader(bytes.NewReader(gz))
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to open gzip reader: %v", err)
|
||||||
|
}
|
||||||
|
defer r.Close()
|
||||||
|
|
||||||
|
b, err := ioutil.ReadAll(r)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to uncompress descriptor: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fd := new(FileDescriptorProto)
|
||||||
|
if err := proto.Unmarshal(b, fd); err != nil {
|
||||||
|
return nil, fmt.Errorf("malformed FileDescriptorProto: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return fd, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Message is a proto.Message with a method to return its descriptor.
|
||||||
|
//
|
||||||
|
// Message types generated by the protocol compiler always satisfy
|
||||||
|
// the Message interface.
|
||||||
|
type Message interface {
|
||||||
|
proto.Message
|
||||||
|
Descriptor() ([]byte, []int)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ForMessage returns a FileDescriptorProto and a DescriptorProto from within it
|
||||||
|
// describing the given message.
|
||||||
|
func ForMessage(msg Message) (fd *FileDescriptorProto, md *DescriptorProto) {
|
||||||
|
gz, path := msg.Descriptor()
|
||||||
|
fd, err := extractFile(gz)
|
||||||
|
if err != nil {
|
||||||
|
panic(fmt.Sprintf("invalid FileDescriptorProto for %T: %v", msg, err))
|
||||||
|
}
|
||||||
|
|
||||||
|
md = fd.MessageType[path[0]]
|
||||||
|
for _, i := range path[1:] {
|
||||||
|
md = md.NestedType[i]
|
||||||
|
}
|
||||||
|
return fd, md
|
||||||
|
}
|
||||||
|
|
||||||
|
// Is this field a scalar numeric type?
|
||||||
|
func (field *FieldDescriptorProto) IsScalar() bool {
|
||||||
|
if field.Type == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
switch *field.Type {
|
||||||
|
case FieldDescriptorProto_TYPE_DOUBLE,
|
||||||
|
FieldDescriptorProto_TYPE_FLOAT,
|
||||||
|
FieldDescriptorProto_TYPE_INT64,
|
||||||
|
FieldDescriptorProto_TYPE_UINT64,
|
||||||
|
FieldDescriptorProto_TYPE_INT32,
|
||||||
|
FieldDescriptorProto_TYPE_FIXED64,
|
||||||
|
FieldDescriptorProto_TYPE_FIXED32,
|
||||||
|
FieldDescriptorProto_TYPE_BOOL,
|
||||||
|
FieldDescriptorProto_TYPE_UINT32,
|
||||||
|
FieldDescriptorProto_TYPE_ENUM,
|
||||||
|
FieldDescriptorProto_TYPE_SFIXED32,
|
||||||
|
FieldDescriptorProto_TYPE_SFIXED64,
|
||||||
|
FieldDescriptorProto_TYPE_SINT32,
|
||||||
|
FieldDescriptorProto_TYPE_SINT64:
|
||||||
|
return true
|
||||||
|
default:
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
vendor/github.com/gogo/protobuf/protoc-gen-gogo/descriptor/descriptor.pb.go (generated, vendored): 511 changed lines
|
@ -21,12 +21,14 @@ It has these top-level messages:
|
||||||
FileOptions
|
FileOptions
|
||||||
MessageOptions
|
MessageOptions
|
||||||
FieldOptions
|
FieldOptions
|
||||||
|
OneofOptions
|
||||||
EnumOptions
|
EnumOptions
|
||||||
EnumValueOptions
|
EnumValueOptions
|
||||||
ServiceOptions
|
ServiceOptions
|
||||||
MethodOptions
|
MethodOptions
|
||||||
UninterpretedOption
|
UninterpretedOption
|
||||||
SourceCodeInfo
|
SourceCodeInfo
|
||||||
|
GeneratedCodeInfo
|
||||||
*/
|
*/
|
||||||
package descriptor
|
package descriptor
|
||||||
|
|
||||||
|
@ -63,6 +65,10 @@ const (
|
||||||
FieldDescriptorProto_TYPE_FIXED32 FieldDescriptorProto_Type = 7
|
FieldDescriptorProto_TYPE_FIXED32 FieldDescriptorProto_Type = 7
|
||||||
FieldDescriptorProto_TYPE_BOOL FieldDescriptorProto_Type = 8
|
FieldDescriptorProto_TYPE_BOOL FieldDescriptorProto_Type = 8
|
||||||
FieldDescriptorProto_TYPE_STRING FieldDescriptorProto_Type = 9
|
FieldDescriptorProto_TYPE_STRING FieldDescriptorProto_Type = 9
|
||||||
|
// Tag-delimited aggregate.
|
||||||
|
// Group type is deprecated and not supported in proto3. However, Proto3
|
||||||
|
// implementations should still be able to parse the group wire format and
|
||||||
|
// treat group fields as unknown fields.
|
||||||
FieldDescriptorProto_TYPE_GROUP FieldDescriptorProto_Type = 10
|
FieldDescriptorProto_TYPE_GROUP FieldDescriptorProto_Type = 10
|
||||||
FieldDescriptorProto_TYPE_MESSAGE FieldDescriptorProto_Type = 11
|
FieldDescriptorProto_TYPE_MESSAGE FieldDescriptorProto_Type = 11
|
||||||
// New in version 2.
|
// New in version 2.
|
||||||
|
@ -299,6 +305,48 @@ func (FieldOptions_JSType) EnumDescriptor() ([]byte, []int) {
|
||||||
return fileDescriptorDescriptor, []int{11, 1}
|
return fileDescriptorDescriptor, []int{11, 1}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
|
||||||
|
// or neither? HTTP based RPC implementation may choose GET verb for safe
|
||||||
|
// methods, and PUT verb for idempotent methods instead of the default POST.
|
||||||
|
type MethodOptions_IdempotencyLevel int32
|
||||||
|
|
||||||
|
const (
|
||||||
|
MethodOptions_IDEMPOTENCY_UNKNOWN MethodOptions_IdempotencyLevel = 0
|
||||||
|
MethodOptions_NO_SIDE_EFFECTS MethodOptions_IdempotencyLevel = 1
|
||||||
|
MethodOptions_IDEMPOTENT MethodOptions_IdempotencyLevel = 2
|
||||||
|
)
|
||||||
|
|
||||||
|
var MethodOptions_IdempotencyLevel_name = map[int32]string{
|
||||||
|
0: "IDEMPOTENCY_UNKNOWN",
|
||||||
|
1: "NO_SIDE_EFFECTS",
|
||||||
|
2: "IDEMPOTENT",
|
||||||
|
}
|
||||||
|
var MethodOptions_IdempotencyLevel_value = map[string]int32{
|
||||||
|
"IDEMPOTENCY_UNKNOWN": 0,
|
||||||
|
"NO_SIDE_EFFECTS": 1,
|
||||||
|
"IDEMPOTENT": 2,
|
||||||
|
}
|
||||||
|
|
||||||
|
func (x MethodOptions_IdempotencyLevel) Enum() *MethodOptions_IdempotencyLevel {
|
||||||
|
p := new(MethodOptions_IdempotencyLevel)
|
||||||
|
*p = x
|
||||||
|
return p
|
||||||
|
}
|
||||||
|
func (x MethodOptions_IdempotencyLevel) String() string {
|
||||||
|
return proto.EnumName(MethodOptions_IdempotencyLevel_name, int32(x))
|
||||||
|
}
|
||||||
|
func (x *MethodOptions_IdempotencyLevel) UnmarshalJSON(data []byte) error {
|
||||||
|
value, err := proto.UnmarshalJSONEnum(MethodOptions_IdempotencyLevel_value, data, "MethodOptions_IdempotencyLevel")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
*x = MethodOptions_IdempotencyLevel(value)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
func (MethodOptions_IdempotencyLevel) EnumDescriptor() ([]byte, []int) {
|
||||||
|
return fileDescriptorDescriptor, []int{16, 0}
|
||||||
|
}
|
||||||
|
|
||||||
// The protocol compiler can output a FileDescriptorSet containing the .proto
|
// The protocol compiler can output a FileDescriptorSet containing the .proto
|
||||||
// files it parses.
|
// files it parses.
|
||||||
type FileDescriptorSet struct {
|
type FileDescriptorSet struct {
|
||||||
|
@ -697,6 +745,7 @@ func (m *FieldDescriptorProto) GetOptions() *FieldOptions {
|
||||||
// Describes a oneof.
|
// Describes a oneof.
|
||||||
type OneofDescriptorProto struct {
|
type OneofDescriptorProto struct {
|
||||||
Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
|
Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
|
||||||
|
Options *OneofOptions `protobuf:"bytes,2,opt,name=options" json:"options,omitempty"`
|
||||||
XXX_unrecognized []byte `json:"-"`
|
XXX_unrecognized []byte `json:"-"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -712,6 +761,13 @@ func (m *OneofDescriptorProto) GetName() string {
|
||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (m *OneofDescriptorProto) GetOptions() *OneofOptions {
|
||||||
|
if m != nil {
|
||||||
|
return m.Options
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
// Describes an enum type.
|
// Describes an enum type.
|
||||||
type EnumDescriptorProto struct {
|
type EnumDescriptorProto struct {
|
||||||
Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
|
Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
|
||||||
|
@ -900,19 +956,8 @@ type FileOptions struct {
|
||||||
// generated to contain the file's getDescriptor() method as well as any
|
// generated to contain the file's getDescriptor() method as well as any
|
||||||
// top-level extensions defined in the file.
|
// top-level extensions defined in the file.
|
||||||
JavaMultipleFiles *bool `protobuf:"varint,10,opt,name=java_multiple_files,json=javaMultipleFiles,def=0" json:"java_multiple_files,omitempty"`
|
JavaMultipleFiles *bool `protobuf:"varint,10,opt,name=java_multiple_files,json=javaMultipleFiles,def=0" json:"java_multiple_files,omitempty"`
|
||||||
// If set true, then the Java code generator will generate equals() and
|
// This option does nothing.
|
||||||
// hashCode() methods for all messages defined in the .proto file.
|
JavaGenerateEqualsAndHash *bool `protobuf:"varint,20,opt,name=java_generate_equals_and_hash,json=javaGenerateEqualsAndHash" json:"java_generate_equals_and_hash,omitempty"`
|
||||||
// This increases generated code size, potentially substantially for large
|
|
||||||
// protos, which may harm a memory-constrained application.
|
|
||||||
// - In the full runtime this is a speed optimization, as the
|
|
||||||
// AbstractMessage base class includes reflection-based implementations of
|
|
||||||
// these methods.
|
|
||||||
// - In the lite runtime, setting this option changes the semantics of
|
|
||||||
// equals() and hashCode() to more closely match those of the full runtime;
|
|
||||||
// the generated methods compute their results based on field values rather
|
|
||||||
// than object identity. (Implementations should not assume that hashcodes
|
|
||||||
// will be consistent across runtimes or versions of the protocol compiler.)
|
|
||||||
JavaGenerateEqualsAndHash *bool `protobuf:"varint,20,opt,name=java_generate_equals_and_hash,json=javaGenerateEqualsAndHash,def=0" json:"java_generate_equals_and_hash,omitempty"`
|
|
||||||
// If set true, then the Java2 code generator will generate code that
|
// If set true, then the Java2 code generator will generate code that
|
||||||
// throws an exception whenever an attempt is made to assign a non-UTF-8
|
// throws an exception whenever an attempt is made to assign a non-UTF-8
|
||||||
// byte sequence to a string field.
|
// byte sequence to a string field.
|
||||||
|
@ -953,9 +998,11 @@ type FileOptions struct {
	ObjcClassPrefix *string `protobuf:"bytes,36,opt,name=objc_class_prefix,json=objcClassPrefix" json:"objc_class_prefix,omitempty"`
	// Namespace for generated classes; defaults to the package.
	CsharpNamespace *string `protobuf:"bytes,37,opt,name=csharp_namespace,json=csharpNamespace" json:"csharp_namespace,omitempty"`
	// Whether the nano proto compiler should generate in the deprecated non-nano
	// suffixed package.
	JavananoUseDeprecatedPackage *bool `protobuf:"varint,38,opt,name=javanano_use_deprecated_package,json=javananoUseDeprecatedPackage" json:"javanano_use_deprecated_package,omitempty"`
	// By default Swift generators will take the proto package and CamelCase it
	// replacing '.' with underscore and use that to prefix the types/symbols
	// defined. When this options is provided, they will use this value instead
	// to prefix the types/symbols defined.
	SwiftPrefix *string `protobuf:"bytes,39,opt,name=swift_prefix,json=swiftPrefix" json:"swift_prefix,omitempty"`
	// The parser stores options it doesn't recognize here. See above.
	UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"`
	proto.XXX_InternalExtensions `json:"-"`
@ -976,7 +1023,6 @@ func (*FileOptions) ExtensionRangeArray() []proto.ExtensionRange {
}

const Default_FileOptions_JavaMultipleFiles bool = false
const Default_FileOptions_JavaGenerateEqualsAndHash bool = false
const Default_FileOptions_JavaStringCheckUtf8 bool = false
const Default_FileOptions_OptimizeFor FileOptions_OptimizeMode = FileOptions_SPEED
const Default_FileOptions_CcGenericServices bool = false
@ -1010,7 +1056,7 @@ func (m *FileOptions) GetJavaGenerateEqualsAndHash() bool {
	if m != nil && m.JavaGenerateEqualsAndHash != nil {
		return *m.JavaGenerateEqualsAndHash
	}
	return Default_FileOptions_JavaGenerateEqualsAndHash
	return false
}

func (m *FileOptions) GetJavaStringCheckUtf8() bool {
@ -1083,11 +1129,11 @@ func (m *FileOptions) GetCsharpNamespace() string {
	return ""
}

func (m *FileOptions) GetJavananoUseDeprecatedPackage() bool {
	if m != nil && m.JavananoUseDeprecatedPackage != nil {
		return *m.JavananoUseDeprecatedPackage
	}
	return false
}

func (m *FileOptions) GetSwiftPrefix() string {
	if m != nil && m.SwiftPrefix != nil {
		return *m.SwiftPrefix
	}
	return ""
}

func (m *FileOptions) GetUninterpretedOption() []*UninterpretedOption {
@ -1247,7 +1293,7 @@ type FieldOptions struct {
	//
	//
	// Note that implementations may choose not to check required fields within
	// a lazy sub-message. That is, calling IsInitialized() on the outher message
	// a lazy sub-message. That is, calling IsInitialized() on the outer message
	// may return true even if the inner message has missing required fields.
	// This is necessary because otherwise the inner message would have to be
	// parsed in order to perform the check, defeating the purpose of lazy
@ -1338,6 +1384,33 @@ func (m *FieldOptions) GetUninterpretedOption() []*UninterpretedOption {
	return nil
}

type OneofOptions struct {
	// The parser stores options it doesn't recognize here. See above.
	UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"`
	proto.XXX_InternalExtensions `json:"-"`
	XXX_unrecognized []byte `json:"-"`
}

func (m *OneofOptions) Reset() { *m = OneofOptions{} }
func (m *OneofOptions) String() string { return proto.CompactTextString(m) }
func (*OneofOptions) ProtoMessage() {}
func (*OneofOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{12} }

var extRange_OneofOptions = []proto.ExtensionRange{
	{Start: 1000, End: 536870911},
}

func (*OneofOptions) ExtensionRangeArray() []proto.ExtensionRange {
	return extRange_OneofOptions
}

func (m *OneofOptions) GetUninterpretedOption() []*UninterpretedOption {
	if m != nil {
		return m.UninterpretedOption
	}
	return nil
}

type EnumOptions struct {
	// Set this option to true to allow mapping different tag names to the same
	// value.
@ -1356,7 +1429,7 @@ type EnumOptions struct {
func (m *EnumOptions) Reset() { *m = EnumOptions{} }
func (m *EnumOptions) String() string { return proto.CompactTextString(m) }
func (*EnumOptions) ProtoMessage() {}
func (*EnumOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{12} }
func (*EnumOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{13} }

var extRange_EnumOptions = []proto.ExtensionRange{
	{Start: 1000, End: 536870911},
@ -1404,7 +1477,7 @@ type EnumValueOptions struct {
func (m *EnumValueOptions) Reset() { *m = EnumValueOptions{} }
func (m *EnumValueOptions) String() string { return proto.CompactTextString(m) }
func (*EnumValueOptions) ProtoMessage() {}
func (*EnumValueOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{13} }
func (*EnumValueOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{14} }

var extRange_EnumValueOptions = []proto.ExtensionRange{
	{Start: 1000, End: 536870911},
@ -1445,7 +1518,7 @@ type ServiceOptions struct {
func (m *ServiceOptions) Reset() { *m = ServiceOptions{} }
func (m *ServiceOptions) String() string { return proto.CompactTextString(m) }
func (*ServiceOptions) ProtoMessage() {}
func (*ServiceOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{14} }
func (*ServiceOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{15} }

var extRange_ServiceOptions = []proto.ExtensionRange{
	{Start: 1000, End: 536870911},
@ -1477,6 +1550,7 @@ type MethodOptions struct {
	// for the method, or it will be completely ignored; in the very least,
	// this is a formalization for deprecating methods.
	Deprecated *bool `protobuf:"varint,33,opt,name=deprecated,def=0" json:"deprecated,omitempty"`
	IdempotencyLevel *MethodOptions_IdempotencyLevel `protobuf:"varint,34,opt,name=idempotency_level,json=idempotencyLevel,enum=google.protobuf.MethodOptions_IdempotencyLevel,def=0" json:"idempotency_level,omitempty"`
	// The parser stores options it doesn't recognize here. See above.
	UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"`
	proto.XXX_InternalExtensions `json:"-"`
@ -1486,7 +1560,7 @@ type MethodOptions struct {
func (m *MethodOptions) Reset() { *m = MethodOptions{} }
func (m *MethodOptions) String() string { return proto.CompactTextString(m) }
func (*MethodOptions) ProtoMessage() {}
func (*MethodOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{15} }
func (*MethodOptions) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{16} }

var extRange_MethodOptions = []proto.ExtensionRange{
	{Start: 1000, End: 536870911},
@ -1497,6 +1571,7 @@ func (*MethodOptions) ExtensionRangeArray() []proto.ExtensionRange {
}

const Default_MethodOptions_Deprecated bool = false
const Default_MethodOptions_IdempotencyLevel MethodOptions_IdempotencyLevel = MethodOptions_IDEMPOTENCY_UNKNOWN

func (m *MethodOptions) GetDeprecated() bool {
	if m != nil && m.Deprecated != nil {
@ -1505,6 +1580,13 @@ func (m *MethodOptions) GetDeprecated() bool {
	return Default_MethodOptions_Deprecated
}

func (m *MethodOptions) GetIdempotencyLevel() MethodOptions_IdempotencyLevel {
	if m != nil && m.IdempotencyLevel != nil {
		return *m.IdempotencyLevel
	}
	return Default_MethodOptions_IdempotencyLevel
}

func (m *MethodOptions) GetUninterpretedOption() []*UninterpretedOption {
	if m != nil {
		return m.UninterpretedOption
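Aside (editorial note, not part of the vendored file): the generated getter above follows the usual optional-field pattern, returning the stored value when IdempotencyLevel is set and falling back to Default_MethodOptions_IdempotencyLevel otherwise. A small hypothetical usage sketch, assuming the package is imported under its canonical path and that the enum value names follow the usual MethodOptions_* generated pattern:

package main

import (
	"fmt"

	"github.com/gogo/protobuf/protoc-gen-gogo/descriptor"
)

func main() {
	opts := &descriptor.MethodOptions{}
	// Field unset: the getter falls back to the default, IDEMPOTENCY_UNKNOWN.
	fmt.Println(opts.GetIdempotencyLevel())

	// Field set: the getter dereferences the stored pointer.
	lvl := descriptor.MethodOptions_NO_SIDE_EFFECTS
	opts.IdempotencyLevel = &lvl
	fmt.Println(opts.GetIdempotencyLevel())
}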
@ -1534,7 +1616,7 @@ type UninterpretedOption struct {
func (m *UninterpretedOption) Reset() { *m = UninterpretedOption{} }
func (m *UninterpretedOption) String() string { return proto.CompactTextString(m) }
func (*UninterpretedOption) ProtoMessage() {}
func (*UninterpretedOption) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{16} }
func (*UninterpretedOption) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{17} }

func (m *UninterpretedOption) GetName() []*UninterpretedOption_NamePart {
	if m != nil {
@ -1600,7 +1682,7 @@ func (m *UninterpretedOption_NamePart) Reset() { *m = UninterpretedOptio
func (m *UninterpretedOption_NamePart) String() string { return proto.CompactTextString(m) }
func (*UninterpretedOption_NamePart) ProtoMessage() {}
func (*UninterpretedOption_NamePart) Descriptor() ([]byte, []int) {
	return fileDescriptorDescriptor, []int{16, 0}
	return fileDescriptorDescriptor, []int{17, 0}
}

func (m *UninterpretedOption_NamePart) GetNamePart() string {
@ -1670,7 +1752,7 @@ type SourceCodeInfo struct {
func (m *SourceCodeInfo) Reset() { *m = SourceCodeInfo{} }
func (m *SourceCodeInfo) String() string { return proto.CompactTextString(m) }
func (*SourceCodeInfo) ProtoMessage() {}
func (*SourceCodeInfo) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{17} }
func (*SourceCodeInfo) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{18} }

func (m *SourceCodeInfo) GetLocation() []*SourceCodeInfo_Location {
	if m != nil {
@ -1767,7 +1849,7 @@ func (m *SourceCodeInfo_Location) Reset() { *m = SourceCodeInfo_Location
func (m *SourceCodeInfo_Location) String() string { return proto.CompactTextString(m) }
func (*SourceCodeInfo_Location) ProtoMessage() {}
func (*SourceCodeInfo_Location) Descriptor() ([]byte, []int) {
	return fileDescriptorDescriptor, []int{17, 0}
	return fileDescriptorDescriptor, []int{18, 0}
}

func (m *SourceCodeInfo_Location) GetPath() []int32 {
@ -1805,6 +1887,79 @@ func (m *SourceCodeInfo_Location) GetLeadingDetachedComments() []string {
	return nil
}

// Describes the relationship between generated code and its original source
// file. A GeneratedCodeInfo message is associated with only one generated
// source file, but may contain references to different source .proto files.
type GeneratedCodeInfo struct {
	// An Annotation connects some span of text in generated code to an element
	// of its generating .proto file.
	Annotation []*GeneratedCodeInfo_Annotation `protobuf:"bytes,1,rep,name=annotation" json:"annotation,omitempty"`
	XXX_unrecognized []byte `json:"-"`
}

func (m *GeneratedCodeInfo) Reset() { *m = GeneratedCodeInfo{} }
func (m *GeneratedCodeInfo) String() string { return proto.CompactTextString(m) }
func (*GeneratedCodeInfo) ProtoMessage() {}
func (*GeneratedCodeInfo) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{19} }

func (m *GeneratedCodeInfo) GetAnnotation() []*GeneratedCodeInfo_Annotation {
	if m != nil {
		return m.Annotation
	}
	return nil
}

type GeneratedCodeInfo_Annotation struct {
	// Identifies the element in the original source .proto file. This field
	// is formatted the same as SourceCodeInfo.Location.path.
	Path []int32 `protobuf:"varint,1,rep,packed,name=path" json:"path,omitempty"`
	// Identifies the filesystem path to the original source .proto.
	SourceFile *string `protobuf:"bytes,2,opt,name=source_file,json=sourceFile" json:"source_file,omitempty"`
	// Identifies the starting offset in bytes in the generated code
	// that relates to the identified object.
	Begin *int32 `protobuf:"varint,3,opt,name=begin" json:"begin,omitempty"`
	// Identifies the ending offset in bytes in the generated code that
	// relates to the identified offset. The end offset should be one past
	// the last relevant byte (so the length of the text = end - begin).
	End *int32 `protobuf:"varint,4,opt,name=end" json:"end,omitempty"`
	XXX_unrecognized []byte `json:"-"`
}

func (m *GeneratedCodeInfo_Annotation) Reset() { *m = GeneratedCodeInfo_Annotation{} }
func (m *GeneratedCodeInfo_Annotation) String() string { return proto.CompactTextString(m) }
func (*GeneratedCodeInfo_Annotation) ProtoMessage() {}
func (*GeneratedCodeInfo_Annotation) Descriptor() ([]byte, []int) {
	return fileDescriptorDescriptor, []int{19, 0}
}

func (m *GeneratedCodeInfo_Annotation) GetPath() []int32 {
	if m != nil {
		return m.Path
	}
	return nil
}

func (m *GeneratedCodeInfo_Annotation) GetSourceFile() string {
	if m != nil && m.SourceFile != nil {
		return *m.SourceFile
	}
	return ""
}

func (m *GeneratedCodeInfo_Annotation) GetBegin() int32 {
	if m != nil && m.Begin != nil {
		return *m.Begin
	}
	return 0
}

func (m *GeneratedCodeInfo_Annotation) GetEnd() int32 {
	if m != nil && m.End != nil {
		return *m.End
	}
	return 0
}

func init() {
	proto.RegisterType((*FileDescriptorSet)(nil), "google.protobuf.FileDescriptorSet")
	proto.RegisterType((*FileDescriptorProto)(nil), "google.protobuf.FileDescriptorProto")
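Aside (editorial note, not part of the vendored file): the GeneratedCodeInfo/Annotation messages added above map byte ranges of a generated file back to paths in the originating .proto. A minimal, hypothetical sketch of how such annotations could be consumed — only the descriptor types come from this package; the helper name and sample data are illustrative assumptions:

package main

import (
	"fmt"

	"github.com/gogo/protobuf/protoc-gen-gogo/descriptor"
)

// printAnnotations reports, for each annotation, which byte range of the
// generated file was produced by which element of the source .proto.
func printAnnotations(info *descriptor.GeneratedCodeInfo) {
	for _, a := range info.GetAnnotation() {
		fmt.Printf("%s path=%v bytes=[%d,%d)\n",
			a.GetSourceFile(), a.GetPath(), a.GetBegin(), a.GetEnd())
	}
}

func main() {
	begin, end := int32(120), int32(180)
	src := "example.proto"
	printAnnotations(&descriptor.GeneratedCodeInfo{
		Annotation: []*descriptor.GeneratedCodeInfo_Annotation{
			{Path: []int32{4, 0}, SourceFile: &src, Begin: &begin, End: &end},
		},
	})
}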
@ -1820,6 +1975,7 @@ func init() {
	proto.RegisterType((*FileOptions)(nil), "google.protobuf.FileOptions")
	proto.RegisterType((*MessageOptions)(nil), "google.protobuf.MessageOptions")
	proto.RegisterType((*FieldOptions)(nil), "google.protobuf.FieldOptions")
	proto.RegisterType((*OneofOptions)(nil), "google.protobuf.OneofOptions")
	proto.RegisterType((*EnumOptions)(nil), "google.protobuf.EnumOptions")
	proto.RegisterType((*EnumValueOptions)(nil), "google.protobuf.EnumValueOptions")
	proto.RegisterType((*ServiceOptions)(nil), "google.protobuf.ServiceOptions")
@ -1828,154 +1984,167 @@ func init() {
	proto.RegisterType((*UninterpretedOption_NamePart)(nil), "google.protobuf.UninterpretedOption.NamePart")
	proto.RegisterType((*SourceCodeInfo)(nil), "google.protobuf.SourceCodeInfo")
	proto.RegisterType((*SourceCodeInfo_Location)(nil), "google.protobuf.SourceCodeInfo.Location")
	proto.RegisterType((*GeneratedCodeInfo)(nil), "google.protobuf.GeneratedCodeInfo")
	proto.RegisterType((*GeneratedCodeInfo_Annotation)(nil), "google.protobuf.GeneratedCodeInfo.Annotation")
	proto.RegisterEnum("google.protobuf.FieldDescriptorProto_Type", FieldDescriptorProto_Type_name, FieldDescriptorProto_Type_value)
	proto.RegisterEnum("google.protobuf.FieldDescriptorProto_Label", FieldDescriptorProto_Label_name, FieldDescriptorProto_Label_value)
	proto.RegisterEnum("google.protobuf.FileOptions_OptimizeMode", FileOptions_OptimizeMode_name, FileOptions_OptimizeMode_value)
	proto.RegisterEnum("google.protobuf.FieldOptions_CType", FieldOptions_CType_name, FieldOptions_CType_value)
	proto.RegisterEnum("google.protobuf.FieldOptions_JSType", FieldOptions_JSType_name, FieldOptions_JSType_value)
	proto.RegisterEnum("google.protobuf.MethodOptions_IdempotencyLevel", MethodOptions_IdempotencyLevel_name, MethodOptions_IdempotencyLevel_value)
}

func init() { proto.RegisterFile("descriptor.proto", fileDescriptorDescriptor) }

var fileDescriptorDescriptor = []byte{
	// 2211 bytes of a gzipped FileDescriptorProto
	// 2379 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xcc, 0x58, 0x4f, 0x73, 0xdb, 0xc6,
|
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x59, 0xcd, 0x73, 0xdb, 0xc6,
|
||||||
0x15, 0x0f, 0xf8, 0x4f, 0xe4, 0x23, 0x45, 0xad, 0x56, 0x8a, 0x03, 0xcb, 0x76, 0x2c, 0x33, 0x76,
|
0x15, 0x37, 0xc1, 0x0f, 0x91, 0x8f, 0x14, 0xb5, 0x5a, 0x29, 0x36, 0x2c, 0xc7, 0xb1, 0xcc, 0xd8,
|
||||||
0x2c, 0xdb, 0xad, 0x9c, 0x91, 0xff, 0x44, 0x51, 0x3a, 0xe9, 0x50, 0x24, 0xac, 0xd0, 0x43, 0x89,
|
0xb5, 0x6c, 0xb7, 0x74, 0x46, 0xfe, 0x88, 0xa3, 0x74, 0xd2, 0xa1, 0x48, 0x58, 0xa1, 0x4b, 0x91,
|
||||||
0x2c, 0x28, 0xb6, 0x4e, 0x2e, 0x98, 0x15, 0xb0, 0xa4, 0x60, 0x83, 0x0b, 0x14, 0x00, 0x6d, 0x2b,
|
0x2c, 0x48, 0x35, 0x76, 0x2e, 0x98, 0x15, 0xb0, 0xa4, 0x60, 0x83, 0x00, 0x02, 0x80, 0xb6, 0x95,
|
||||||
0x27, 0xcf, 0xf4, 0xd4, 0x6f, 0xd0, 0x69, 0x3b, 0x3d, 0xe4, 0x92, 0x99, 0x7e, 0x80, 0x1e, 0x7a,
|
0x93, 0x67, 0x7a, 0xea, 0x7f, 0xd0, 0xe9, 0x74, 0x7a, 0xc8, 0x25, 0x33, 0xed, 0xbd, 0x87, 0xde,
|
||||||
0xef, 0xb5, 0x87, 0x9e, 0x7b, 0xec, 0x4c, 0xfb, 0x0d, 0x7a, 0xed, 0xec, 0x2e, 0x00, 0x82, 0x7f,
|
0x7b, 0xed, 0x4c, 0xef, 0x3d, 0xf4, 0xd0, 0x99, 0xf6, 0x3f, 0xe8, 0xb5, 0xb3, 0xbb, 0x00, 0x08,
|
||||||
0x14, 0xab, 0x99, 0x49, 0xd3, 0x93, 0xb4, 0xbf, 0xf7, 0x7b, 0x8f, 0x6f, 0xdf, 0xfe, 0xb0, 0xef,
|
0x7e, 0xd9, 0x4a, 0x66, 0x9c, 0x9c, 0xa4, 0xfd, 0xbd, 0xdf, 0x7b, 0x78, 0xfb, 0xf6, 0x87, 0xdd,
|
||||||
0x01, 0x80, 0x2c, 0x1a, 0x98, 0xbe, 0xed, 0x85, 0xae, 0xbf, 0xed, 0xf9, 0x6e, 0xe8, 0xe2, 0x95,
|
0x87, 0x25, 0x20, 0x83, 0xfa, 0xba, 0x67, 0xba, 0x81, 0xe3, 0x55, 0x5d, 0xcf, 0x09, 0x1c, 0xbc,
|
||||||
0xa1, 0xeb, 0x0e, 0x1d, 0x2a, 0x57, 0x27, 0xe3, 0x41, 0xed, 0x10, 0x56, 0x9f, 0xd8, 0x0e, 0x6d,
|
0x36, 0x74, 0x9c, 0xa1, 0x45, 0xc5, 0xe8, 0x78, 0x3c, 0xa8, 0x1c, 0xc2, 0xfa, 0x23, 0xd3, 0xa2,
|
||||||
0x26, 0xc4, 0x1e, 0x0d, 0xf1, 0x2e, 0xe4, 0x06, 0xb6, 0x43, 0x55, 0x65, 0x33, 0xbb, 0x55, 0xde,
|
0x8d, 0x98, 0xd8, 0xa3, 0x01, 0x7e, 0x08, 0x99, 0x81, 0x69, 0x51, 0x39, 0xb5, 0x9d, 0xde, 0x29,
|
||||||
0xb9, 0xb9, 0x3d, 0xe3, 0xb4, 0x3d, 0xed, 0xd1, 0xe5, 0xb0, 0x2e, 0x3c, 0x6a, 0xff, 0xc8, 0xc1,
|
0xee, 0x5e, 0xab, 0xce, 0x38, 0x55, 0xa7, 0x3d, 0xba, 0x0c, 0x56, 0xb9, 0x47, 0xe5, 0xdf, 0x19,
|
||||||
0xda, 0x02, 0x2b, 0xc6, 0x90, 0x63, 0x64, 0xc4, 0x23, 0x2a, 0x5b, 0x25, 0x5d, 0xfc, 0x8f, 0x55,
|
0xd8, 0x58, 0x60, 0xc5, 0x18, 0x32, 0x36, 0x19, 0xb1, 0x88, 0xa9, 0x9d, 0x82, 0xca, 0xff, 0xc7,
|
||||||
0x58, 0xf2, 0x88, 0xf9, 0x82, 0x0c, 0xa9, 0x9a, 0x11, 0x70, 0xbc, 0xc4, 0xef, 0x03, 0x58, 0xd4,
|
0x32, 0xac, 0xb8, 0x44, 0x7f, 0x4e, 0x86, 0x54, 0x96, 0x38, 0x1c, 0x0d, 0xf1, 0x07, 0x00, 0x06,
|
||||||
0xa3, 0xcc, 0xa2, 0xcc, 0x3c, 0x53, 0xb3, 0x9b, 0xd9, 0xad, 0x92, 0x9e, 0x42, 0xf0, 0x3d, 0x58,
|
0x75, 0xa9, 0x6d, 0x50, 0x5b, 0x3f, 0x95, 0xd3, 0xdb, 0xe9, 0x9d, 0x82, 0x9a, 0x40, 0xf0, 0x6d,
|
||||||
0xf5, 0xc6, 0x27, 0x8e, 0x6d, 0x1a, 0x29, 0x1a, 0x6c, 0x66, 0xb7, 0xf2, 0x3a, 0x92, 0x86, 0xe6,
|
0x58, 0x77, 0xc7, 0xc7, 0x96, 0xa9, 0x6b, 0x09, 0x1a, 0x6c, 0xa7, 0x77, 0xb2, 0x2a, 0x12, 0x86,
|
||||||
0x84, 0x7c, 0x1b, 0x56, 0x5e, 0x51, 0xf2, 0x22, 0x4d, 0x2d, 0x0b, 0x6a, 0x95, 0xc3, 0x29, 0x62,
|
0xc6, 0x84, 0x7c, 0x03, 0xd6, 0x5e, 0x52, 0xf2, 0x3c, 0x49, 0x2d, 0x72, 0x6a, 0x99, 0xc1, 0x09,
|
||||||
0x03, 0x2a, 0x23, 0x1a, 0x04, 0x64, 0x48, 0x8d, 0xf0, 0xcc, 0xa3, 0x6a, 0x4e, 0xec, 0x7e, 0x73,
|
0x62, 0x1d, 0x4a, 0x23, 0xea, 0xfb, 0x64, 0x48, 0xb5, 0xe0, 0xd4, 0xa5, 0x72, 0x86, 0xcf, 0x7e,
|
||||||
0x6e, 0xf7, 0xb3, 0x3b, 0x2f, 0x47, 0x5e, 0xc7, 0x67, 0x1e, 0xc5, 0x75, 0x28, 0x51, 0x36, 0x1e,
|
0x7b, 0x6e, 0xf6, 0xb3, 0x33, 0x2f, 0x86, 0x5e, 0xfd, 0x53, 0x97, 0xe2, 0x1a, 0x14, 0xa8, 0x3d,
|
||||||
0xc9, 0x08, 0xf9, 0x73, 0xea, 0xa7, 0xb1, 0xf1, 0x68, 0x36, 0x4a, 0x91, 0xbb, 0x45, 0x21, 0x96,
|
0x1e, 0x89, 0x08, 0xd9, 0x25, 0xf5, 0x53, 0xec, 0xf1, 0x68, 0x36, 0x4a, 0x9e, 0xb9, 0x85, 0x21,
|
||||||
0x02, 0xea, 0xbf, 0xb4, 0x4d, 0xaa, 0x16, 0x44, 0x80, 0xdb, 0x73, 0x01, 0x7a, 0xd2, 0x3e, 0x1b,
|
0x56, 0x7c, 0xea, 0xbd, 0x30, 0x75, 0x2a, 0xe7, 0x78, 0x80, 0x1b, 0x73, 0x01, 0x7a, 0xc2, 0x3e,
|
||||||
0x23, 0xf6, 0xc3, 0x0d, 0x28, 0xd1, 0xd7, 0x21, 0x65, 0x81, 0xed, 0x32, 0x75, 0x49, 0x04, 0xb9,
|
0x1b, 0x23, 0xf2, 0xc3, 0x75, 0x28, 0xd0, 0x57, 0x01, 0xb5, 0x7d, 0xd3, 0xb1, 0xe5, 0x15, 0x1e,
|
||||||
0xb5, 0xe0, 0x14, 0xa9, 0x63, 0xcd, 0x86, 0x98, 0xf8, 0xe1, 0xc7, 0xb0, 0xe4, 0x7a, 0xa1, 0xed,
|
0xe4, 0xfa, 0x82, 0x55, 0xa4, 0x96, 0x31, 0x1b, 0x62, 0xe2, 0x87, 0x1f, 0xc0, 0x8a, 0xe3, 0x06,
|
||||||
0xb2, 0x40, 0x2d, 0x6e, 0x2a, 0x5b, 0xe5, 0x9d, 0xab, 0x0b, 0x85, 0xd0, 0x91, 0x1c, 0x3d, 0x26,
|
0xa6, 0x63, 0xfb, 0x72, 0x7e, 0x3b, 0xb5, 0x53, 0xdc, 0x7d, 0x7f, 0xa1, 0x10, 0x3a, 0x82, 0xa3,
|
||||||
0xe3, 0x16, 0xa0, 0xc0, 0x1d, 0xfb, 0x26, 0x35, 0x4c, 0xd7, 0xa2, 0x86, 0xcd, 0x06, 0xae, 0x5a,
|
0x46, 0x64, 0xdc, 0x04, 0xe4, 0x3b, 0x63, 0x4f, 0xa7, 0x9a, 0xee, 0x18, 0x54, 0x33, 0xed, 0x81,
|
||||||
0x12, 0x01, 0xae, 0xcf, 0x6f, 0x44, 0x10, 0x1b, 0xae, 0x45, 0x5b, 0x6c, 0xe0, 0xea, 0xd5, 0x60,
|
0x23, 0x17, 0x78, 0x80, 0x2b, 0xf3, 0x13, 0xe1, 0xc4, 0xba, 0x63, 0xd0, 0xa6, 0x3d, 0x70, 0xd4,
|
||||||
0x6a, 0x8d, 0x2f, 0x41, 0x21, 0x38, 0x63, 0x21, 0x79, 0xad, 0x56, 0x84, 0x42, 0xa2, 0x55, 0xed,
|
0xb2, 0x3f, 0x35, 0xc6, 0xe7, 0x21, 0xe7, 0x9f, 0xda, 0x01, 0x79, 0x25, 0x97, 0xb8, 0x42, 0xc2,
|
||||||
0xdf, 0x79, 0x58, 0xb9, 0x88, 0xc4, 0x3e, 0x85, 0xfc, 0x80, 0xef, 0x52, 0xcd, 0xfc, 0x37, 0x35,
|
0x51, 0xe5, 0x7f, 0x59, 0x58, 0x3b, 0x8b, 0xc4, 0x3e, 0x85, 0xec, 0x80, 0xcd, 0x52, 0x96, 0xbe,
|
||||||
0x90, 0x3e, 0xd3, 0x45, 0x2c, 0x7c, 0xc7, 0x22, 0xd6, 0xa1, 0xcc, 0x68, 0x10, 0x52, 0x4b, 0x2a,
|
0x4b, 0x0d, 0x84, 0xcf, 0x74, 0x11, 0x73, 0xdf, 0xb3, 0x88, 0x35, 0x28, 0xda, 0xd4, 0x0f, 0xa8,
|
||||||
0x22, 0x7b, 0x41, 0x4d, 0x81, 0x74, 0x9a, 0x97, 0x54, 0xee, 0x3b, 0x49, 0xea, 0x19, 0xac, 0x24,
|
0x21, 0x14, 0x91, 0x3e, 0xa3, 0xa6, 0x40, 0x38, 0xcd, 0x4b, 0x2a, 0xf3, 0xbd, 0x24, 0xf5, 0x04,
|
||||||
0x29, 0x19, 0x3e, 0x61, 0xc3, 0x58, 0x9b, 0xf7, 0xdf, 0x96, 0xc9, 0xb6, 0x16, 0xfb, 0xe9, 0xdc,
|
0xd6, 0xe2, 0x94, 0x34, 0x8f, 0xd8, 0xc3, 0x48, 0x9b, 0x77, 0xde, 0x96, 0x49, 0x55, 0x89, 0xfc,
|
||||||
0x4d, 0xaf, 0xd2, 0xa9, 0x35, 0x6e, 0x02, 0xb8, 0x8c, 0xba, 0x03, 0xc3, 0xa2, 0xa6, 0xa3, 0x16,
|
0x54, 0xe6, 0xa6, 0x96, 0xe9, 0xd4, 0x18, 0x37, 0x00, 0x1c, 0x9b, 0x3a, 0x03, 0xcd, 0xa0, 0xba,
|
||||||
0xcf, 0xa9, 0x52, 0x87, 0x53, 0xe6, 0xaa, 0xe4, 0x4a, 0xd4, 0x74, 0xf0, 0x27, 0x13, 0xa9, 0x2d,
|
0x25, 0xe7, 0x97, 0x54, 0xa9, 0xc3, 0x28, 0x73, 0x55, 0x72, 0x04, 0xaa, 0x5b, 0xf8, 0x93, 0x89,
|
||||||
0x9d, 0xa3, 0x94, 0x43, 0xf9, 0x90, 0xcd, 0xa9, 0xad, 0x0f, 0x55, 0x9f, 0x72, 0xdd, 0x53, 0x2b,
|
0xd4, 0x56, 0x96, 0x28, 0xe5, 0x50, 0xbc, 0x64, 0x73, 0x6a, 0x3b, 0x82, 0xb2, 0x47, 0x99, 0xee,
|
||||||
0xda, 0x59, 0x49, 0x24, 0xb1, 0xfd, 0xd6, 0x9d, 0xe9, 0x91, 0x9b, 0xdc, 0xd8, 0xb2, 0x9f, 0x5e,
|
0xa9, 0x11, 0xce, 0xac, 0xc0, 0x93, 0xa8, 0xbe, 0x75, 0x66, 0x6a, 0xe8, 0x26, 0x26, 0xb6, 0xea,
|
||||||
0xe2, 0x0f, 0x20, 0x01, 0x0c, 0x21, 0x2b, 0x10, 0xb7, 0x50, 0x25, 0x06, 0x8f, 0xc8, 0x88, 0x6e,
|
0x25, 0x87, 0xf8, 0x43, 0x88, 0x01, 0x8d, 0xcb, 0x0a, 0xf8, 0x2e, 0x54, 0x8a, 0xc0, 0x36, 0x19,
|
||||||
0xec, 0x42, 0x75, 0xba, 0x3c, 0x78, 0x1d, 0xf2, 0x41, 0x48, 0xfc, 0x50, 0xa8, 0x30, 0xaf, 0xcb,
|
0xd1, 0xad, 0x87, 0x50, 0x9e, 0x2e, 0x0f, 0xde, 0x84, 0xac, 0x1f, 0x10, 0x2f, 0xe0, 0x2a, 0xcc,
|
||||||
0x05, 0x46, 0x90, 0xa5, 0xcc, 0x12, 0xb7, 0x5c, 0x5e, 0xe7, 0xff, 0x6e, 0x7c, 0x0c, 0xcb, 0x53,
|
0xaa, 0x62, 0x80, 0x11, 0xa4, 0xa9, 0x6d, 0xf0, 0x5d, 0x2e, 0xab, 0xb2, 0x7f, 0xb7, 0x3e, 0x86,
|
||||||
0x3f, 0x7f, 0x51, 0xc7, 0xda, 0x6f, 0x0a, 0xb0, 0xbe, 0x48, 0x73, 0x0b, 0xe5, 0x7f, 0x09, 0x0a,
|
0xd5, 0xa9, 0xc7, 0x9f, 0xd5, 0xb1, 0xf2, 0xbb, 0x1c, 0x6c, 0x2e, 0xd2, 0xdc, 0x42, 0xf9, 0x9f,
|
||||||
0x6c, 0x3c, 0x3a, 0xa1, 0xbe, 0x9a, 0x15, 0x11, 0xa2, 0x15, 0xae, 0x43, 0xde, 0x21, 0x27, 0xd4,
|
0x87, 0x9c, 0x3d, 0x1e, 0x1d, 0x53, 0x4f, 0x4e, 0xf3, 0x08, 0xe1, 0x08, 0xd7, 0x20, 0x6b, 0x91,
|
||||||
0x51, 0x73, 0x9b, 0xca, 0x56, 0x75, 0xe7, 0xde, 0x85, 0x54, 0xbd, 0xdd, 0xe6, 0x2e, 0xba, 0xf4,
|
0x63, 0x6a, 0xc9, 0x99, 0xed, 0xd4, 0x4e, 0x79, 0xf7, 0xf6, 0x99, 0x54, 0x5d, 0x6d, 0x31, 0x17,
|
||||||
0xc4, 0x9f, 0x41, 0x2e, 0xba, 0xe2, 0x78, 0x84, 0xbb, 0x17, 0x8b, 0xc0, 0xb5, 0xa8, 0x0b, 0x3f,
|
0x55, 0x78, 0xe2, 0xcf, 0x20, 0x13, 0x6e, 0x71, 0x2c, 0xc2, 0xad, 0xb3, 0x45, 0x60, 0x5a, 0x54,
|
||||||
0x7c, 0x05, 0x4a, 0xfc, 0xaf, 0xac, 0x6d, 0x41, 0xe4, 0x5c, 0xe4, 0x00, 0xaf, 0x2b, 0xde, 0x80,
|
0xb9, 0x1f, 0xbe, 0x04, 0x05, 0xf6, 0x57, 0xd4, 0x36, 0xc7, 0x73, 0xce, 0x33, 0x80, 0xd5, 0x15,
|
||||||
0xa2, 0x90, 0x99, 0x45, 0xe3, 0xd6, 0x90, 0xac, 0xf9, 0xc1, 0x58, 0x74, 0x40, 0xc6, 0x4e, 0x68,
|
0x6f, 0x41, 0x9e, 0xcb, 0xcc, 0xa0, 0xd1, 0xd1, 0x10, 0x8f, 0xd9, 0xc2, 0x18, 0x74, 0x40, 0xc6,
|
||||||
0xbc, 0x24, 0xce, 0x98, 0x0a, 0xc1, 0x94, 0xf4, 0x4a, 0x04, 0xfe, 0x9c, 0x63, 0xf8, 0x3a, 0x94,
|
0x56, 0xa0, 0xbd, 0x20, 0xd6, 0x98, 0x72, 0xc1, 0x14, 0xd4, 0x52, 0x08, 0xfe, 0x9a, 0x61, 0xf8,
|
||||||
0xa5, 0x2a, 0x6d, 0x66, 0xd1, 0xd7, 0xe2, 0xf6, 0xc9, 0xeb, 0x52, 0xa8, 0x2d, 0x8e, 0xf0, 0x9f,
|
0x0a, 0x14, 0x85, 0x2a, 0x4d, 0xdb, 0xa0, 0xaf, 0xf8, 0xee, 0x93, 0x55, 0x85, 0x50, 0x9b, 0x0c,
|
||||||
0x7f, 0x1e, 0xb8, 0x2c, 0x3e, 0x5a, 0xf1, 0x13, 0x1c, 0x10, 0x3f, 0xff, 0xf1, 0xec, 0xc5, 0x77,
|
0x61, 0x8f, 0x7f, 0xe6, 0x3b, 0x76, 0xb4, 0xb4, 0xfc, 0x11, 0x0c, 0xe0, 0x8f, 0xff, 0x78, 0x76,
|
||||||
0x6d, 0xf1, 0xf6, 0x66, 0xb5, 0x58, 0xfb, 0x53, 0x06, 0x72, 0xe2, 0x79, 0x5b, 0x81, 0xf2, 0xf1,
|
0xe3, 0xbb, 0xbc, 0x78, 0x7a, 0xb3, 0x5a, 0xac, 0xfc, 0x45, 0x82, 0x0c, 0x7f, 0xdf, 0xd6, 0xa0,
|
||||||
0x17, 0x5d, 0xcd, 0x68, 0x76, 0xfa, 0xfb, 0x6d, 0x0d, 0x29, 0xb8, 0x0a, 0x20, 0x80, 0x27, 0xed,
|
0xd8, 0x7f, 0xda, 0x55, 0xb4, 0x46, 0xe7, 0x68, 0xbf, 0xa5, 0xa0, 0x14, 0x2e, 0x03, 0x70, 0xe0,
|
||||||
0x4e, 0xfd, 0x18, 0x65, 0x92, 0x75, 0xeb, 0xe8, 0xf8, 0xf1, 0x43, 0x94, 0x4d, 0x1c, 0xfa, 0x12,
|
0x51, 0xab, 0x53, 0xeb, 0x23, 0x29, 0x1e, 0x37, 0xdb, 0xfd, 0x07, 0xf7, 0x50, 0x3a, 0x76, 0x38,
|
||||||
0xc8, 0xa5, 0x09, 0x0f, 0x76, 0x50, 0x1e, 0x23, 0xa8, 0xc8, 0x00, 0xad, 0x67, 0x5a, 0xf3, 0xf1,
|
0x12, 0x40, 0x26, 0x49, 0xb8, 0xbb, 0x8b, 0xb2, 0x18, 0x41, 0x49, 0x04, 0x68, 0x3e, 0x51, 0x1a,
|
||||||
0x43, 0x54, 0x98, 0x46, 0x1e, 0xec, 0xa0, 0x25, 0xbc, 0x0c, 0x25, 0x81, 0xec, 0x77, 0x3a, 0x6d,
|
0x0f, 0xee, 0xa1, 0xdc, 0x34, 0x72, 0x77, 0x17, 0xad, 0xe0, 0x55, 0x28, 0x70, 0x64, 0xbf, 0xd3,
|
||||||
0x54, 0x4c, 0x62, 0xf6, 0x8e, 0xf5, 0xd6, 0xd1, 0x01, 0x2a, 0x25, 0x31, 0x0f, 0xf4, 0x4e, 0xbf,
|
0x69, 0xa1, 0x7c, 0x1c, 0xb3, 0xd7, 0x57, 0x9b, 0xed, 0x03, 0x54, 0x88, 0x63, 0x1e, 0xa8, 0x9d,
|
||||||
0x8b, 0x20, 0x89, 0x70, 0xa8, 0xf5, 0x7a, 0xf5, 0x03, 0x0d, 0x95, 0x13, 0xc6, 0xfe, 0x17, 0xc7,
|
0xa3, 0x2e, 0x82, 0x38, 0xc2, 0xa1, 0xd2, 0xeb, 0xd5, 0x0e, 0x14, 0x54, 0x8c, 0x19, 0xfb, 0x4f,
|
||||||
0x5a, 0x0f, 0x55, 0xa6, 0xd2, 0x7a, 0xb0, 0x83, 0x96, 0x93, 0x9f, 0xd0, 0x8e, 0xfa, 0x87, 0xa8,
|
0xfb, 0x4a, 0x0f, 0x95, 0xa6, 0xd2, 0xba, 0xbb, 0x8b, 0x56, 0xe3, 0x47, 0x28, 0xed, 0xa3, 0x43,
|
||||||
0x8a, 0x57, 0x61, 0x59, 0xfe, 0x44, 0x9c, 0xc4, 0xca, 0x0c, 0xf4, 0xf8, 0x21, 0x42, 0x93, 0x44,
|
0x54, 0xc6, 0xeb, 0xb0, 0x2a, 0x1e, 0x11, 0x25, 0xb1, 0x36, 0x03, 0x3d, 0xb8, 0x87, 0xd0, 0x24,
|
||||||
0x64, 0x94, 0xd5, 0x29, 0xe0, 0xf1, 0x43, 0x84, 0x6b, 0x0d, 0xc8, 0x0b, 0x75, 0x61, 0x0c, 0xd5,
|
0x11, 0x11, 0x65, 0x7d, 0x0a, 0x78, 0x70, 0x0f, 0xe1, 0x4a, 0x1d, 0xb2, 0x5c, 0x5d, 0x18, 0x43,
|
||||||
0x76, 0x7d, 0x5f, 0x6b, 0x1b, 0x9d, 0xee, 0x71, 0xab, 0x73, 0x54, 0x6f, 0x23, 0x65, 0x82, 0xe9,
|
0xb9, 0x55, 0xdb, 0x57, 0x5a, 0x5a, 0xa7, 0xdb, 0x6f, 0x76, 0xda, 0xb5, 0x16, 0x4a, 0x4d, 0x30,
|
||||||
0xda, 0xcf, 0xfa, 0x2d, 0x5d, 0x6b, 0xa2, 0x4c, 0x1a, 0xeb, 0x6a, 0xf5, 0x63, 0xad, 0x89, 0xb2,
|
0x55, 0xf9, 0xd5, 0x51, 0x53, 0x55, 0x1a, 0x48, 0x4a, 0x62, 0x5d, 0xa5, 0xd6, 0x57, 0x1a, 0x28,
|
||||||
0xb5, 0xbb, 0xb0, 0xbe, 0xe8, 0x9e, 0x59, 0xf4, 0x64, 0xd4, 0xbe, 0x56, 0x60, 0x6d, 0xc1, 0x95,
|
0x5d, 0xd1, 0x61, 0x73, 0xd1, 0x3e, 0xb3, 0xf0, 0xcd, 0x48, 0x2c, 0xb1, 0xb4, 0x64, 0x89, 0x79,
|
||||||
0xb9, 0xf0, 0x29, 0xfa, 0x29, 0xe4, 0xa5, 0xd2, 0x64, 0x13, 0xb9, 0xb3, 0xf0, 0xee, 0x15, 0xba,
|
0xac, 0xb9, 0x25, 0xfe, 0x26, 0x05, 0x1b, 0x0b, 0xf6, 0xda, 0x85, 0x0f, 0xf9, 0x05, 0x64, 0x85,
|
||||||
0x9b, 0x6b, 0x24, 0xc2, 0x2f, 0xdd, 0x48, 0xb3, 0xe7, 0x34, 0x52, 0x1e, 0x62, 0x4e, 0x4e, 0xbf,
|
0x44, 0xc5, 0xe9, 0x73, 0x73, 0xe1, 0xa6, 0xcd, 0x05, 0x3b, 0x77, 0x02, 0x71, 0xbf, 0xe4, 0x09,
|
||||||
0x52, 0x40, 0x3d, 0x2f, 0xf6, 0x5b, 0x9e, 0xf7, 0xcc, 0xd4, 0xf3, 0xfe, 0xe9, 0x6c, 0x02, 0x37,
|
0x9c, 0x5e, 0x72, 0x02, 0xb3, 0x10, 0x73, 0x49, 0xfe, 0x26, 0x05, 0xf2, 0xb2, 0xd8, 0x6f, 0xd9,
|
||||||
0xce, 0xdf, 0xc3, 0x5c, 0x16, 0xdf, 0x28, 0x70, 0x69, 0xf1, 0xbc, 0xb1, 0x30, 0x87, 0xcf, 0xa0,
|
0x28, 0xa4, 0xa9, 0x8d, 0xe2, 0xd3, 0xd9, 0x04, 0xae, 0x2e, 0x9f, 0xc3, 0x5c, 0x16, 0xdf, 0xa6,
|
||||||
0x30, 0xa2, 0xe1, 0xa9, 0x1b, 0xf7, 0xdc, 0x0f, 0x17, 0xdc, 0xe4, 0xdc, 0x3c, 0x5b, 0xab, 0xc8,
|
0xe0, 0xfc, 0xe2, 0x46, 0x65, 0x61, 0x0e, 0x9f, 0x41, 0x6e, 0x44, 0x83, 0x13, 0x27, 0x3a, 0xac,
|
||||||
0x2b, 0xdd, 0x0a, 0xb2, 0xe7, 0x0d, 0x0d, 0x32, 0x9b, 0xb9, 0x4c, 0x7f, 0x9d, 0x81, 0x77, 0x17,
|
0x7f, 0xb2, 0xe0, 0x08, 0x60, 0xe6, 0xd9, 0x5a, 0x85, 0x5e, 0xc9, 0x33, 0x24, 0xbd, 0xac, 0xdb,
|
||||||
0x06, 0x5f, 0x98, 0xe8, 0x35, 0x00, 0x9b, 0x79, 0xe3, 0x50, 0xf6, 0x55, 0x79, 0xcd, 0x94, 0x04,
|
0x10, 0xd9, 0xcc, 0x65, 0xfa, 0x5b, 0x09, 0xde, 0x5b, 0x18, 0x7c, 0x61, 0xa2, 0x97, 0x01, 0x4c,
|
||||||
0x22, 0x1e, 0x61, 0x7e, 0x85, 0x8c, 0xc3, 0xc4, 0x9e, 0x15, 0x76, 0x90, 0x90, 0x20, 0xec, 0x4e,
|
0xdb, 0x1d, 0x07, 0xe2, 0x40, 0x16, 0xfb, 0x53, 0x81, 0x23, 0xfc, 0xdd, 0x67, 0x7b, 0xcf, 0x38,
|
||||||
0x12, 0xcd, 0x89, 0x44, 0xdf, 0x3f, 0x67, 0xa7, 0x73, 0x2d, 0xeb, 0x23, 0x40, 0xa6, 0x63, 0x53,
|
0x88, 0xed, 0x69, 0x6e, 0x07, 0x01, 0x71, 0xc2, 0xc3, 0x49, 0xa2, 0x19, 0x9e, 0xe8, 0x07, 0x4b,
|
||||||
0x16, 0x1a, 0x41, 0xe8, 0x53, 0x32, 0xb2, 0xd9, 0x50, 0xdc, 0xa3, 0xc5, 0xbd, 0xfc, 0x80, 0x38,
|
0x66, 0x3a, 0x77, 0xd6, 0x7d, 0x04, 0x48, 0xb7, 0x4c, 0x6a, 0x07, 0x9a, 0x1f, 0x78, 0x94, 0x8c,
|
||||||
0x01, 0xd5, 0x57, 0xa4, 0xb9, 0x17, 0x5b, 0xb9, 0x87, 0x68, 0x16, 0x7e, 0xca, 0xa3, 0x30, 0xe5,
|
0x4c, 0x7b, 0xc8, 0x37, 0xe0, 0xfc, 0x5e, 0x76, 0x40, 0x2c, 0x9f, 0xaa, 0x6b, 0xc2, 0xdc, 0x8b,
|
||||||
0x21, 0xcd, 0x89, 0x47, 0xed, 0x6f, 0x4b, 0x50, 0x4e, 0x4d, 0x67, 0xf8, 0x06, 0x54, 0x9e, 0x93,
|
0xac, 0xcc, 0x83, 0x9f, 0x32, 0x5e, 0xc2, 0x23, 0x37, 0xe5, 0x21, 0xcc, 0xb1, 0x47, 0xe5, 0xcf,
|
||||||
0x97, 0xc4, 0x88, 0x27, 0x6e, 0x59, 0x89, 0x32, 0xc7, 0xba, 0xd1, 0xd4, 0xfd, 0x11, 0xac, 0x0b,
|
0x2b, 0x50, 0x4c, 0xb4, 0x75, 0xf8, 0x2a, 0x94, 0x9e, 0x91, 0x17, 0x44, 0x8b, 0x5a, 0x75, 0x51,
|
||||||
0x8a, 0x3b, 0x0e, 0xa9, 0x6f, 0x98, 0x0e, 0x09, 0x02, 0x51, 0xb4, 0xa2, 0xa0, 0x62, 0x6e, 0xeb,
|
0x89, 0x22, 0xc3, 0xba, 0x61, 0xbb, 0xfe, 0x11, 0x6c, 0x72, 0x8a, 0x33, 0x0e, 0xa8, 0xa7, 0xe9,
|
||||||
0x70, 0x53, 0x23, 0xb6, 0xe0, 0x47, 0xb0, 0x26, 0x3c, 0x46, 0x63, 0x27, 0xb4, 0x3d, 0x87, 0x1a,
|
0x16, 0xf1, 0x7d, 0x5e, 0xb4, 0x3c, 0xa7, 0x62, 0x66, 0xeb, 0x30, 0x53, 0x3d, 0xb2, 0xe0, 0xfb,
|
||||||
0xfc, 0x1d, 0x20, 0x10, 0xf7, 0x69, 0x92, 0xd9, 0x2a, 0x67, 0x1c, 0x46, 0x04, 0x9e, 0x51, 0x80,
|
0xb0, 0xc1, 0x3d, 0x46, 0x63, 0x2b, 0x30, 0x5d, 0x8b, 0x6a, 0xec, 0xe3, 0xc1, 0xe7, 0x1b, 0x71,
|
||||||
0x0f, 0xe0, 0x9a, 0x70, 0x1b, 0x52, 0x46, 0x7d, 0x12, 0x52, 0x83, 0xfe, 0x72, 0x4c, 0x9c, 0xc0,
|
0x9c, 0xd9, 0x3a, 0x63, 0x1c, 0x86, 0x04, 0x96, 0x91, 0x8f, 0x1b, 0x70, 0x99, 0xbb, 0x0d, 0xa9,
|
||||||
0x20, 0xcc, 0x32, 0x4e, 0x49, 0x70, 0xaa, 0xae, 0xa7, 0x03, 0x5c, 0xe6, 0xdc, 0x83, 0x88, 0xaa,
|
0x4d, 0x3d, 0x12, 0x50, 0x8d, 0x7e, 0x35, 0x26, 0x96, 0xaf, 0x11, 0xdb, 0xd0, 0x4e, 0x88, 0x7f,
|
||||||
0x09, 0x66, 0x9d, 0x59, 0x9f, 0x93, 0xe0, 0x14, 0xef, 0xc1, 0x25, 0x11, 0x28, 0x08, 0x7d, 0x9b,
|
0x22, 0x6f, 0xb2, 0x00, 0xfb, 0x92, 0x9c, 0x52, 0x2f, 0x32, 0xe2, 0x41, 0xc8, 0x53, 0x38, 0xad,
|
||||||
0x0d, 0x0d, 0xf3, 0x94, 0x9a, 0x2f, 0x8c, 0x71, 0x38, 0xd8, 0x55, 0xaf, 0xa4, 0x23, 0x88, 0x24,
|
0x66, 0x1b, 0x9f, 0x13, 0xff, 0x04, 0xef, 0xc1, 0x79, 0x1e, 0xc5, 0x0f, 0x3c, 0xd3, 0x1e, 0x6a,
|
||||||
0x7b, 0x82, 0xd3, 0xe0, 0x94, 0x7e, 0x38, 0xd8, 0xc5, 0x3d, 0xa8, 0xf0, 0xf3, 0x18, 0xd9, 0x5f,
|
0xfa, 0x09, 0xd5, 0x9f, 0x6b, 0xe3, 0x60, 0xf0, 0x50, 0xbe, 0x94, 0x7c, 0x3e, 0xcf, 0xb0, 0xc7,
|
||||||
0x51, 0x63, 0xe0, 0xfa, 0xa2, 0x47, 0x54, 0x17, 0x3c, 0xdc, 0xa9, 0x22, 0x6e, 0x77, 0x22, 0x87,
|
0x39, 0x75, 0x46, 0x39, 0x0a, 0x06, 0x0f, 0x71, 0x0f, 0x4a, 0x6c, 0x31, 0x46, 0xe6, 0xd7, 0x54,
|
||||||
0x43, 0xd7, 0xa2, 0x7b, 0xf9, 0x5e, 0x57, 0xd3, 0x9a, 0x7a, 0x39, 0x8e, 0xf2, 0xc4, 0xf5, 0xb9,
|
0x1b, 0x38, 0x1e, 0x3f, 0x59, 0xca, 0x0b, 0xde, 0xec, 0x44, 0x05, 0xab, 0x9d, 0xd0, 0xe1, 0xd0,
|
||||||
0xa6, 0x86, 0x6e, 0x52, 0xe3, 0xb2, 0xd4, 0xd4, 0xd0, 0x8d, 0x2b, 0xfc, 0x08, 0xd6, 0x4c, 0x53,
|
0x31, 0xe8, 0x5e, 0xb6, 0xd7, 0x55, 0x94, 0x86, 0x5a, 0x8c, 0xa2, 0x3c, 0x72, 0x3c, 0x26, 0xa8,
|
||||||
0x6e, 0xdb, 0x36, 0x8d, 0x68, 0x58, 0x0f, 0x54, 0x34, 0x55, 0x2f, 0xd3, 0x3c, 0x90, 0x84, 0x48,
|
0xa1, 0x13, 0x17, 0xb8, 0x28, 0x04, 0x35, 0x74, 0xa2, 0xf2, 0xde, 0x87, 0x0d, 0x5d, 0x17, 0x73,
|
||||||
0xe6, 0x01, 0xfe, 0x04, 0xde, 0x9d, 0xd4, 0x2b, 0xed, 0xb8, 0x3a, 0xb7, 0xcb, 0x59, 0xd7, 0x47,
|
0x36, 0x75, 0x2d, 0x6c, 0xf1, 0x7d, 0x19, 0x4d, 0x15, 0x4b, 0xd7, 0x0f, 0x04, 0x21, 0xd4, 0xb8,
|
||||||
0xb0, 0xe6, 0x9d, 0xcd, 0x3b, 0xe2, 0xa9, 0x5f, 0xf4, 0xce, 0x66, 0xdd, 0x6e, 0x89, 0x17, 0x30,
|
0x8f, 0x3f, 0x81, 0xf7, 0x26, 0xc5, 0x4a, 0x3a, 0xae, 0xcf, 0xcd, 0x72, 0xd6, 0xf5, 0x3e, 0x6c,
|
||||||
0x9f, 0x9a, 0x24, 0xa4, 0x96, 0xfa, 0x5e, 0x9a, 0x9d, 0x32, 0xe0, 0xfb, 0x80, 0x4c, 0xd3, 0xa0,
|
0xb8, 0xa7, 0xf3, 0x8e, 0x78, 0xea, 0x89, 0xee, 0xe9, 0xac, 0xdb, 0x75, 0xfe, 0xd9, 0xe6, 0x51,
|
||||||
0x8c, 0x9c, 0x38, 0xd4, 0x20, 0x3e, 0x65, 0x24, 0x50, 0xaf, 0xa7, 0xc9, 0x55, 0xd3, 0xd4, 0x84,
|
0x9d, 0x04, 0xd4, 0x90, 0x2f, 0x24, 0xd9, 0x09, 0x03, 0xbe, 0x03, 0x48, 0xd7, 0x35, 0x6a, 0x93,
|
||||||
0xb5, 0x2e, 0x8c, 0xf8, 0x2e, 0xac, 0xba, 0x27, 0xcf, 0x4d, 0x29, 0x2e, 0xc3, 0xf3, 0xe9, 0xc0,
|
0x63, 0x8b, 0x6a, 0xc4, 0xa3, 0x36, 0xf1, 0xe5, 0x2b, 0x49, 0x72, 0x59, 0xd7, 0x15, 0x6e, 0xad,
|
||||||
0x7e, 0xad, 0xde, 0x14, 0x65, 0x5a, 0xe1, 0x06, 0x21, 0xad, 0xae, 0x80, 0xf1, 0x1d, 0x40, 0x66,
|
0x71, 0x23, 0xbe, 0x05, 0xeb, 0xce, 0xf1, 0x33, 0x5d, 0x28, 0x4b, 0x73, 0x3d, 0x3a, 0x30, 0x5f,
|
||||||
0x70, 0x4a, 0x7c, 0x4f, 0x34, 0xe9, 0xc0, 0x23, 0x26, 0x55, 0x6f, 0x49, 0xaa, 0xc4, 0x8f, 0x62,
|
0xc9, 0xd7, 0x78, 0x99, 0xd6, 0x98, 0x81, 0xeb, 0xaa, 0xcb, 0x61, 0x7c, 0x13, 0x90, 0xee, 0x9f,
|
||||||
0x18, 0x6b, 0x70, 0x9d, 0x6f, 0x9e, 0x11, 0xe6, 0x1a, 0xe3, 0x80, 0x1a, 0x93, 0x14, 0x93, 0xb3,
|
0x10, 0xcf, 0xe5, 0x47, 0xbb, 0xef, 0x12, 0x9d, 0xca, 0xd7, 0x05, 0x55, 0xe0, 0xed, 0x08, 0x66,
|
||||||
0xf8, 0x90, 0xa7, 0xa5, 0x5f, 0x8d, 0x69, 0xfd, 0x80, 0x36, 0x13, 0x52, 0x7c, 0x3c, 0xcf, 0x60,
|
0xca, 0xf6, 0x5f, 0x9a, 0x83, 0x20, 0x8a, 0x78, 0x43, 0x28, 0x9b, 0x63, 0x61, 0xb4, 0x27, 0xb0,
|
||||||
0x7d, 0xcc, 0x6c, 0x16, 0x52, 0xdf, 0xf3, 0x29, 0x77, 0x96, 0x0f, 0xac, 0xfa, 0xcf, 0xa5, 0x73,
|
0x39, 0xb6, 0x4d, 0x3b, 0xa0, 0x9e, 0xeb, 0x51, 0xd6, 0xc4, 0x8b, 0x37, 0x51, 0xfe, 0xcf, 0xca,
|
||||||
0x86, 0xee, 0x7e, 0x9a, 0x2d, 0x45, 0xa2, 0xaf, 0x8d, 0xe7, 0xc1, 0xda, 0x1e, 0x54, 0xd2, 0xda,
|
0x92, 0x36, 0xfc, 0x28, 0xc9, 0x16, 0x02, 0x50, 0x37, 0xc6, 0xf3, 0x60, 0x65, 0x0f, 0x4a, 0x49,
|
||||||
0xc1, 0x25, 0x90, 0xea, 0x41, 0x0a, 0xef, 0xa8, 0x8d, 0x4e, 0x93, 0xf7, 0xc2, 0x2f, 0x35, 0x94,
|
0x5d, 0xe0, 0x02, 0x08, 0x65, 0xa0, 0x14, 0x3b, 0x63, 0xeb, 0x9d, 0x06, 0x3b, 0x1d, 0xbf, 0x54,
|
||||||
0xe1, 0x3d, 0xb9, 0xdd, 0x3a, 0xd6, 0x0c, 0xbd, 0x7f, 0x74, 0xdc, 0x3a, 0xd4, 0x50, 0xf6, 0x6e,
|
0x90, 0xc4, 0x4e, 0xe9, 0x56, 0xb3, 0xaf, 0x68, 0xea, 0x51, 0xbb, 0xdf, 0x3c, 0x54, 0x50, 0xfa,
|
||||||
0xa9, 0xf8, 0xaf, 0x25, 0xf4, 0xe6, 0xcd, 0x9b, 0x37, 0x99, 0xda, 0x5f, 0x32, 0x50, 0x9d, 0x9e,
|
0x56, 0x21, 0xff, 0xdf, 0x15, 0xf4, 0xfa, 0xf5, 0xeb, 0xd7, 0x52, 0xe5, 0x6f, 0x12, 0x94, 0xa7,
|
||||||
0x83, 0xf1, 0x4f, 0xe0, 0xbd, 0xf8, 0xa5, 0x35, 0xa0, 0xa1, 0xf1, 0xca, 0xf6, 0x85, 0x9c, 0x47,
|
0x3b, 0x63, 0xfc, 0x73, 0xb8, 0x10, 0x7d, 0xc6, 0xfa, 0x34, 0xd0, 0x5e, 0x9a, 0x1e, 0x97, 0xea,
|
||||||
0x44, 0x4e, 0x92, 0xc9, 0x49, 0xac, 0x47, 0xac, 0x1e, 0x0d, 0x7f, 0x61, 0xfb, 0x5c, 0xac, 0x23,
|
0x88, 0x88, 0xde, 0x32, 0xae, 0xf2, 0x66, 0xc8, 0xea, 0xd1, 0xe0, 0x0b, 0xd3, 0x63, 0x42, 0x1c,
|
||||||
0x12, 0xe2, 0x36, 0x5c, 0x67, 0xae, 0x11, 0x84, 0x84, 0x59, 0xc4, 0xb7, 0x8c, 0xc9, 0xe7, 0x02,
|
0x91, 0x00, 0xb7, 0xe0, 0x8a, 0xed, 0x68, 0x7e, 0x40, 0x6c, 0x83, 0x78, 0x86, 0x36, 0xb9, 0x40,
|
||||||
0x83, 0x98, 0x26, 0x0d, 0x02, 0x57, 0x76, 0x92, 0x24, 0xca, 0x55, 0xe6, 0xf6, 0x22, 0xf2, 0xe4,
|
0xd0, 0x88, 0xae, 0x53, 0xdf, 0x77, 0xc4, 0x11, 0x11, 0x47, 0x79, 0xdf, 0x76, 0x7a, 0x21, 0x79,
|
||||||
0x8a, 0xad, 0x47, 0xd4, 0x19, 0xd5, 0x64, 0xcf, 0x53, 0xcd, 0x15, 0x28, 0x8d, 0x88, 0x67, 0x50,
|
0xb2, 0x77, 0xd6, 0x42, 0xea, 0x8c, 0x22, 0xd2, 0xcb, 0x14, 0x71, 0x09, 0x0a, 0x23, 0xe2, 0x6a,
|
||||||
0x16, 0xfa, 0x67, 0x62, 0x7a, 0x2b, 0xea, 0xc5, 0x11, 0xf1, 0x34, 0xbe, 0xfe, 0xfe, 0xce, 0x20,
|
0xd4, 0x0e, 0xbc, 0x53, 0xde, 0xcf, 0xe5, 0xd5, 0xfc, 0x88, 0xb8, 0x0a, 0x1b, 0xbf, 0xbb, 0x35,
|
||||||
0x5d, 0xc7, 0xbf, 0x67, 0xa1, 0x92, 0x9e, 0xe0, 0xf8, 0x40, 0x6c, 0x8a, 0x6b, 0x5e, 0x11, 0xb7,
|
0x48, 0xd6, 0xf1, 0x9f, 0x69, 0x28, 0x25, 0x7b, 0x3a, 0xd6, 0x22, 0xeb, 0x7c, 0xff, 0x4e, 0xf1,
|
||||||
0xc0, 0x07, 0xdf, 0x3a, 0xef, 0x6d, 0x37, 0xf8, 0xfd, 0xbf, 0x57, 0x90, 0x73, 0x95, 0x2e, 0x3d,
|
0x37, 0xfc, 0xc3, 0x37, 0x76, 0x80, 0xd5, 0x3a, 0xdb, 0xd8, 0xf7, 0x72, 0xa2, 0xd3, 0x52, 0x85,
|
||||||
0x79, 0xef, 0xe5, 0x5a, 0xa3, 0x72, 0x5a, 0x2f, 0xea, 0xd1, 0x0a, 0x1f, 0x40, 0xe1, 0x79, 0x20,
|
0x27, 0x3b, 0x54, 0xd9, 0x3b, 0x4d, 0x45, 0xff, 0x9e, 0x57, 0xc3, 0x11, 0x3e, 0x80, 0xdc, 0x33,
|
||||||
0x62, 0x17, 0x44, 0xec, 0x9b, 0xdf, 0x1e, 0xfb, 0x69, 0x4f, 0x04, 0x2f, 0x3d, 0xed, 0x19, 0x47,
|
0x9f, 0xc7, 0xce, 0xf1, 0xd8, 0xd7, 0xde, 0x1c, 0xfb, 0x71, 0x8f, 0x07, 0x2f, 0x3c, 0xee, 0x69,
|
||||||
0x1d, 0xfd, 0xb0, 0xde, 0xd6, 0x23, 0x77, 0x7c, 0x19, 0x72, 0x0e, 0xf9, 0xea, 0x6c, 0xba, 0x53,
|
0xed, 0x8e, 0x7a, 0x58, 0x6b, 0xa9, 0xa1, 0x3b, 0xbe, 0x08, 0x19, 0x8b, 0x7c, 0x7d, 0x3a, 0x7d,
|
||||||
0x08, 0xe8, 0xa2, 0x85, 0xbf, 0x0c, 0xb9, 0x57, 0x94, 0xbc, 0x98, 0xbe, 0x9f, 0x05, 0xf4, 0x3d,
|
0x04, 0x70, 0xe8, 0xac, 0x85, 0xbf, 0x08, 0x99, 0x97, 0x94, 0x3c, 0x9f, 0xde, 0x78, 0x39, 0xf4,
|
||||||
0x4a, 0xff, 0x3e, 0xe4, 0x45, 0xbd, 0x30, 0x40, 0x54, 0x31, 0xf4, 0x0e, 0x2e, 0x42, 0xae, 0xd1,
|
0x0e, 0xa5, 0x7f, 0x07, 0xb2, 0xbc, 0x5e, 0x18, 0x20, 0xac, 0x18, 0x3a, 0x87, 0xf3, 0x90, 0xa9,
|
||||||
0xd1, 0xb9, 0xfc, 0x11, 0x54, 0x24, 0x6a, 0x74, 0x5b, 0x5a, 0x43, 0x43, 0x99, 0xda, 0x23, 0x28,
|
0x77, 0x54, 0x26, 0x7f, 0x04, 0x25, 0x81, 0x6a, 0xdd, 0xa6, 0x52, 0x57, 0x90, 0x54, 0xb9, 0x0f,
|
||||||
0xc8, 0x22, 0xf0, 0x47, 0x23, 0x29, 0x03, 0x7a, 0x27, 0x5a, 0x46, 0x31, 0x94, 0xd8, 0xda, 0x3f,
|
0x39, 0x51, 0x04, 0xf6, 0x6a, 0xc4, 0x65, 0x40, 0xe7, 0xc2, 0x61, 0x18, 0x23, 0x15, 0x59, 0x8f,
|
||||||
0xdc, 0xd7, 0x74, 0x94, 0x49, 0x1f, 0xef, 0x9f, 0x15, 0x28, 0xa7, 0x06, 0x2a, 0xde, 0xca, 0x89,
|
0x0e, 0xf7, 0x15, 0x15, 0x49, 0xc9, 0xe5, 0xf5, 0xa1, 0x94, 0x6c, 0xe7, 0x7e, 0x18, 0x4d, 0xfd,
|
||||||
0xe3, 0xb8, 0xaf, 0x0c, 0xe2, 0xd8, 0x24, 0x88, 0xce, 0x07, 0x04, 0x54, 0xe7, 0xc8, 0x45, 0xeb,
|
0x35, 0x05, 0xc5, 0x44, 0x7b, 0xc6, 0x1a, 0x03, 0x62, 0x59, 0xce, 0x4b, 0x8d, 0x58, 0x26, 0xf1,
|
||||||
0xf7, 0x3f, 0xd1, 0xe6, 0x1f, 0x14, 0x40, 0xb3, 0xc3, 0xd8, 0x4c, 0x82, 0xca, 0x0f, 0x9a, 0xe0,
|
0x43, 0x51, 0x00, 0x87, 0x6a, 0x0c, 0x39, 0xeb, 0xa2, 0xfd, 0x20, 0xc9, 0xff, 0x31, 0x05, 0x68,
|
||||||
0xef, 0x15, 0xa8, 0x4e, 0x4f, 0x60, 0x33, 0xe9, 0xdd, 0xf8, 0x41, 0xd3, 0xfb, 0x9d, 0x02, 0xcb,
|
0xb6, 0xb5, 0x9b, 0x49, 0x30, 0xf5, 0xa3, 0x26, 0xf8, 0x87, 0x14, 0x94, 0xa7, 0xfb, 0xb9, 0x99,
|
||||||
0x53, 0x73, 0xd7, 0xff, 0x55, 0x76, 0xbf, 0xcd, 0xc2, 0xda, 0x02, 0x3f, 0x5c, 0x8f, 0x06, 0x54,
|
0xf4, 0xae, 0xfe, 0xa8, 0xe9, 0xfd, 0x4b, 0x82, 0xd5, 0xa9, 0x2e, 0xee, 0xac, 0xd9, 0x7d, 0x05,
|
||||||
0x39, 0x33, 0xff, 0xf8, 0x22, 0xbf, 0xb5, 0xcd, 0xfb, 0x5f, 0x97, 0xf8, 0x61, 0x34, 0xcf, 0xde,
|
0xeb, 0xa6, 0x41, 0x47, 0xae, 0x13, 0x50, 0x5b, 0x3f, 0xd5, 0x2c, 0xfa, 0x82, 0x5a, 0x72, 0x85,
|
||||||
0x01, 0x64, 0x5b, 0x94, 0x85, 0xf6, 0xc0, 0xa6, 0x7e, 0xf4, 0x6e, 0x2c, 0xa7, 0xd6, 0x95, 0x09,
|
0x6f, 0x14, 0x77, 0xde, 0xdc, 0x27, 0x56, 0x9b, 0x13, 0xbf, 0x16, 0x73, 0xdb, 0xdb, 0x68, 0x36,
|
||||||
0x2e, 0x5f, 0x8f, 0x7f, 0x04, 0xd8, 0x73, 0x03, 0x3b, 0xb4, 0x5f, 0x52, 0xc3, 0x66, 0xf1, 0x8b,
|
0x94, 0xc3, 0x6e, 0xa7, 0xaf, 0xb4, 0xeb, 0x4f, 0xb5, 0xa3, 0xf6, 0x2f, 0xdb, 0x9d, 0x2f, 0xda,
|
||||||
0x34, 0x9f, 0x62, 0x73, 0x3a, 0x8a, 0x2d, 0x2d, 0x16, 0x26, 0x6c, 0x46, 0x87, 0x64, 0x86, 0xcd,
|
0x2a, 0x32, 0x67, 0x68, 0xef, 0xf0, 0x55, 0xef, 0x02, 0x9a, 0x4d, 0x0a, 0x5f, 0x80, 0x45, 0x69,
|
||||||
0xaf, 0xa1, 0xac, 0x8e, 0x62, 0x4b, 0xc2, 0xbe, 0x01, 0x15, 0xcb, 0x1d, 0xf3, 0x81, 0x40, 0xf2,
|
0xa1, 0x73, 0x78, 0x03, 0xd6, 0xda, 0x1d, 0xad, 0xd7, 0x6c, 0x28, 0x9a, 0xf2, 0xe8, 0x91, 0x52,
|
||||||
0xf8, 0xad, 0xa7, 0xe8, 0x65, 0x89, 0x25, 0x94, 0x68, 0x62, 0x9b, 0xbc, 0xc1, 0x57, 0xf4, 0xb2,
|
0xef, 0xf7, 0xc4, 0x87, 0x73, 0xcc, 0xee, 0x4f, 0xbf, 0xd4, 0xbf, 0x4f, 0xc3, 0xc6, 0x82, 0x4c,
|
||||||
0xc4, 0x24, 0xe5, 0x36, 0xac, 0x90, 0xe1, 0xd0, 0xe7, 0xc1, 0xe3, 0x40, 0x72, 0x0c, 0xad, 0x26,
|
0x70, 0x2d, 0xec, 0xd9, 0xc5, 0x67, 0xc4, 0xcf, 0xce, 0x92, 0x7d, 0x95, 0x75, 0x05, 0x5d, 0xe2,
|
||||||
0xb0, 0x20, 0x6e, 0x3c, 0x85, 0x62, 0x5c, 0x07, 0xde, 0x58, 0x78, 0x25, 0x0c, 0x4f, 0x7e, 0x47,
|
0x05, 0x61, 0x8b, 0x7f, 0x13, 0x58, 0x95, 0xec, 0xc0, 0x1c, 0x98, 0xd4, 0x0b, 0xef, 0x19, 0x44,
|
||||||
0xc9, 0xf0, 0x97, 0x7a, 0x16, 0x1b, 0x6f, 0x40, 0xc5, 0x0e, 0x8c, 0xc9, 0x07, 0xbd, 0xcc, 0x66,
|
0x23, 0xbf, 0x36, 0xc1, 0xc5, 0x55, 0xc3, 0x4f, 0x01, 0xbb, 0x8e, 0x6f, 0x06, 0xe6, 0x0b, 0xaa,
|
||||||
0x66, 0xab, 0xa8, 0x97, 0xed, 0x20, 0xf9, 0x82, 0x53, 0xfb, 0x26, 0x03, 0xd5, 0xe9, 0x0f, 0x92,
|
0x99, 0x76, 0x74, 0x29, 0xc1, 0x1a, 0xfb, 0x8c, 0x8a, 0x22, 0x4b, 0xd3, 0x0e, 0x62, 0xb6, 0x4d,
|
||||||
0xb8, 0x09, 0x45, 0xc7, 0x35, 0x89, 0x10, 0x82, 0xfc, 0x1a, 0xbe, 0xf5, 0x96, 0x6f, 0x98, 0xdb,
|
0x87, 0x64, 0x86, 0xcd, 0x36, 0xf0, 0xb4, 0x8a, 0x22, 0x4b, 0xcc, 0xbe, 0x0a, 0x25, 0xc3, 0x19,
|
||||||
0xed, 0x88, 0xaf, 0x27, 0x9e, 0x1b, 0x7f, 0x55, 0xa0, 0x18, 0xc3, 0xf8, 0x12, 0xe4, 0x3c, 0x12,
|
0xb3, 0x36, 0x49, 0xf0, 0xd8, 0x79, 0x91, 0x52, 0x8b, 0x02, 0x8b, 0x29, 0x61, 0x1f, 0x3b, 0xb9,
|
||||||
0x9e, 0x8a, 0x70, 0xf9, 0xfd, 0x0c, 0x52, 0x74, 0xb1, 0xe6, 0x78, 0xe0, 0x11, 0x26, 0x24, 0x10,
|
0x0d, 0x29, 0xa9, 0x45, 0x81, 0x09, 0xca, 0x0d, 0x58, 0x23, 0xc3, 0xa1, 0xc7, 0x82, 0x47, 0x81,
|
||||||
0xe1, 0x7c, 0xcd, 0xcf, 0xd5, 0xa1, 0xc4, 0x12, 0x03, 0xae, 0x3b, 0x1a, 0x51, 0x16, 0x06, 0xf1,
|
0x44, 0x67, 0x5e, 0x8e, 0x61, 0x4e, 0xdc, 0x7a, 0x0c, 0xf9, 0xa8, 0x0e, 0xec, 0x48, 0x66, 0x95,
|
||||||
0xb9, 0x46, 0x78, 0x23, 0x82, 0xf1, 0x3d, 0x58, 0x0d, 0x7d, 0x62, 0x3b, 0x53, 0xdc, 0x9c, 0xe0,
|
0xd0, 0x5c, 0x71, 0x27, 0x25, 0xed, 0x14, 0xd4, 0xbc, 0x1d, 0x19, 0xaf, 0x42, 0xc9, 0xf4, 0xb5,
|
||||||
0xa2, 0xd8, 0x90, 0x90, 0xf7, 0xe0, 0x72, 0x1c, 0xd7, 0xa2, 0x21, 0x31, 0x4f, 0xa9, 0x35, 0x71,
|
0xc9, 0xe5, 0xa8, 0xb4, 0x2d, 0xed, 0xe4, 0xd5, 0xa2, 0xe9, 0xc7, 0xb7, 0x61, 0x95, 0x6f, 0x25,
|
||||||
0x2a, 0x88, 0xaf, 0x5d, 0xef, 0x45, 0x84, 0x66, 0x64, 0x8f, 0x7d, 0xf7, 0x9f, 0xc1, 0x9a, 0xe9,
|
0x28, 0x4f, 0x5f, 0xee, 0xe2, 0x06, 0xe4, 0x2d, 0x47, 0x27, 0x5c, 0x5a, 0xe2, 0x97, 0x85, 0x9d,
|
||||||
0x8e, 0x66, 0x2b, 0xb1, 0x8f, 0x66, 0xde, 0xbb, 0x82, 0xcf, 0x95, 0x2f, 0x61, 0x32, 0x54, 0x7c,
|
0xb7, 0xdc, 0x07, 0x57, 0x5b, 0x21, 0x5f, 0x8d, 0x3d, 0xb7, 0xfe, 0x9e, 0x82, 0x7c, 0x04, 0xe3,
|
||||||
0x9d, 0xc9, 0x1e, 0x74, 0xf7, 0xff, 0x98, 0xd9, 0x38, 0x90, 0x7e, 0xdd, 0xb8, 0x82, 0x3a, 0x1d,
|
0xf3, 0x90, 0x71, 0x49, 0x70, 0xc2, 0xc3, 0x65, 0xf7, 0x25, 0x94, 0x52, 0xf9, 0x98, 0xe1, 0xbe,
|
||||||
0x38, 0xd4, 0xe4, 0xd5, 0xf9, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x78, 0x42, 0x69, 0x71, 0xb3,
|
0x4b, 0x6c, 0x2e, 0x81, 0x10, 0x67, 0x63, 0xb6, 0xae, 0x16, 0x25, 0x06, 0x6f, 0xfb, 0x9d, 0xd1,
|
||||||
0x18, 0x00, 0x00,
|
0x88, 0xda, 0x81, 0x1f, 0xad, 0x6b, 0x88, 0xd7, 0x43, 0x18, 0xdf, 0x86, 0xf5, 0xc0, 0x23, 0xa6,
|
||||||
|
0x35, 0xc5, 0xcd, 0x70, 0x2e, 0x8a, 0x0c, 0x31, 0x79, 0x0f, 0x2e, 0x46, 0x71, 0x0d, 0x1a, 0x10,
|
||||||
|
0xfd, 0x84, 0x1a, 0x13, 0xa7, 0x1c, 0xbf, 0x39, 0xbc, 0x10, 0x12, 0x1a, 0xa1, 0x3d, 0xf2, 0xad,
|
||||||
|
0xfc, 0x23, 0x05, 0xeb, 0xd1, 0x87, 0x8a, 0x11, 0x17, 0xeb, 0x10, 0x80, 0xd8, 0xb6, 0x13, 0x24,
|
||||||
|
0xcb, 0x35, 0x2f, 0xe5, 0x39, 0xbf, 0x6a, 0x2d, 0x76, 0x52, 0x13, 0x01, 0xb6, 0x46, 0x00, 0x13,
|
||||||
|
0xcb, 0xd2, 0xb2, 0x5d, 0x81, 0x62, 0x78, 0x73, 0xcf, 0x7f, 0xfe, 0x11, 0x9f, 0xb6, 0x20, 0x20,
|
||||||
|
0xf6, 0x45, 0x83, 0x37, 0x21, 0x7b, 0x4c, 0x87, 0xa6, 0x1d, 0xde, 0x27, 0x8a, 0x41, 0x74, 0x4b,
|
||||||
|
0x99, 0x89, 0x6f, 0x29, 0xf7, 0x9f, 0xc0, 0x86, 0xee, 0x8c, 0x66, 0xd3, 0xdd, 0x47, 0x33, 0x9f,
|
||||||
|
0xd7, 0xfe, 0xe7, 0xa9, 0x2f, 0x61, 0xd2, 0x62, 0x7e, 0x23, 0xa5, 0x0f, 0xba, 0xfb, 0x7f, 0x92,
|
||||||
|
0xb6, 0x0e, 0x84, 0x5f, 0x37, 0x9a, 0xa6, 0x4a, 0x07, 0x16, 0xd5, 0x59, 0xea, 0xff, 0x0f, 0x00,
|
||||||
|
0x00, 0xff, 0xff, 0xa0, 0xbf, 0x63, 0x15, 0xd3, 0x1a, 0x00, 0x00,
|
||||||
}
|
}
|
||||||
|
|
71  vendor/github.com/gogo/protobuf/protoc-gen-gogo/descriptor/descriptor_gostring.gen.go  (generated, vendored)
|
@ -21,12 +21,14 @@ It has these top-level messages:
	FileOptions
	MessageOptions
	FieldOptions
	OneofOptions
	EnumOptions
	EnumValueOptions
	ServiceOptions
	MethodOptions
	UninterpretedOption
	SourceCodeInfo
	GeneratedCodeInfo
*/
package descriptor
@ -231,11 +233,14 @@ func (this *OneofDescriptorProto) GoString() string {
	if this == nil {
		return "nil"
	}
	s := make([]string, 0, 5)
	s := make([]string, 0, 6)
	s = append(s, "&descriptor.OneofDescriptorProto{")
	if this.Name != nil {
		s = append(s, "Name: "+valueToGoStringDescriptor(this.Name, "string")+",\n")
	}
	if this.Options != nil {
		s = append(s, "Options: "+fmt.Sprintf("%#v", this.Options)+",\n")
	}
	if this.XXX_unrecognized != nil {
		s = append(s, "XXX_unrecognized:"+fmt.Sprintf("%#v", this.XXX_unrecognized)+",\n")
	}
@ -383,8 +388,8 @@ func (this *FileOptions) GoString() string {
	if this.CsharpNamespace != nil {
		s = append(s, "CsharpNamespace: "+valueToGoStringDescriptor(this.CsharpNamespace, "string")+",\n")
	}
	if this.JavananoUseDeprecatedPackage != nil {
		s = append(s, "JavananoUseDeprecatedPackage: "+valueToGoStringDescriptor(this.JavananoUseDeprecatedPackage, "bool")+",\n")
	}
	if this.SwiftPrefix != nil {
		s = append(s, "SwiftPrefix: "+valueToGoStringDescriptor(this.SwiftPrefix, "string")+",\n")
	}
	if this.UninterpretedOption != nil {
		s = append(s, "UninterpretedOption: "+fmt.Sprintf("%#v", this.UninterpretedOption)+",\n")
@ -458,6 +463,22 @@ func (this *FieldOptions) GoString() string {
	s = append(s, "}")
	return strings.Join(s, "")
}
func (this *OneofOptions) GoString() string {
	if this == nil {
		return "nil"
	}
	s := make([]string, 0, 5)
	s = append(s, "&descriptor.OneofOptions{")
	if this.UninterpretedOption != nil {
		s = append(s, "UninterpretedOption: "+fmt.Sprintf("%#v", this.UninterpretedOption)+",\n")
	}
	s = append(s, "XXX_InternalExtensions: "+extensionToGoStringDescriptor(this)+",\n")
	if this.XXX_unrecognized != nil {
		s = append(s, "XXX_unrecognized:"+fmt.Sprintf("%#v", this.XXX_unrecognized)+",\n")
	}
	s = append(s, "}")
	return strings.Join(s, "")
}
func (this *EnumOptions) GoString() string {
	if this == nil {
		return "nil"
@ -522,11 +543,14 @@ func (this *MethodOptions) GoString() string {
	if this == nil {
		return "nil"
	}
	s := make([]string, 0, 6)
	s := make([]string, 0, 7)
	s = append(s, "&descriptor.MethodOptions{")
	if this.Deprecated != nil {
		s = append(s, "Deprecated: "+valueToGoStringDescriptor(this.Deprecated, "bool")+",\n")
	}
	if this.IdempotencyLevel != nil {
		s = append(s, "IdempotencyLevel: "+valueToGoStringDescriptor(this.IdempotencyLevel, "descriptor.MethodOptions_IdempotencyLevel")+",\n")
	}
	if this.UninterpretedOption != nil {
		s = append(s, "UninterpretedOption: "+fmt.Sprintf("%#v", this.UninterpretedOption)+",\n")
	}
@ -630,6 +654,45 @@ func (this *SourceCodeInfo_Location) GoString() string {
|
||||||
s = append(s, "}")
|
s = append(s, "}")
|
||||||
return strings.Join(s, "")
|
return strings.Join(s, "")
|
||||||
}
|
}
|
||||||
|
func (this *GeneratedCodeInfo) GoString() string {
|
||||||
|
if this == nil {
|
||||||
|
return "nil"
|
||||||
|
}
|
||||||
|
s := make([]string, 0, 5)
|
||||||
|
s = append(s, "&descriptor.GeneratedCodeInfo{")
|
||||||
|
if this.Annotation != nil {
|
||||||
|
s = append(s, "Annotation: "+fmt.Sprintf("%#v", this.Annotation)+",\n")
|
||||||
|
}
|
||||||
|
if this.XXX_unrecognized != nil {
|
||||||
|
s = append(s, "XXX_unrecognized:"+fmt.Sprintf("%#v", this.XXX_unrecognized)+",\n")
|
||||||
|
}
|
||||||
|
s = append(s, "}")
|
||||||
|
return strings.Join(s, "")
|
||||||
|
}
|
||||||
|
func (this *GeneratedCodeInfo_Annotation) GoString() string {
|
||||||
|
if this == nil {
|
||||||
|
return "nil"
|
||||||
|
}
|
||||||
|
s := make([]string, 0, 8)
|
||||||
|
s = append(s, "&descriptor.GeneratedCodeInfo_Annotation{")
|
||||||
|
if this.Path != nil {
|
||||||
|
s = append(s, "Path: "+fmt.Sprintf("%#v", this.Path)+",\n")
|
||||||
|
}
|
||||||
|
if this.SourceFile != nil {
|
||||||
|
s = append(s, "SourceFile: "+valueToGoStringDescriptor(this.SourceFile, "string")+",\n")
|
||||||
|
}
|
||||||
|
if this.Begin != nil {
|
||||||
|
s = append(s, "Begin: "+valueToGoStringDescriptor(this.Begin, "int32")+",\n")
|
||||||
|
}
|
||||||
|
if this.End != nil {
|
||||||
|
s = append(s, "End: "+valueToGoStringDescriptor(this.End, "int32")+",\n")
|
||||||
|
}
|
||||||
|
if this.XXX_unrecognized != nil {
|
||||||
|
s = append(s, "XXX_unrecognized:"+fmt.Sprintf("%#v", this.XXX_unrecognized)+",\n")
|
||||||
|
}
|
||||||
|
s = append(s, "}")
|
||||||
|
return strings.Join(s, "")
|
||||||
|
}
|
||||||
func valueToGoStringDescriptor(v interface{}, typ string) string {
|
func valueToGoStringDescriptor(v interface{}, typ string) string {
|
||||||
rv := reflect.ValueOf(v)
|
rv := reflect.ValueOf(v)
|
||||||
if rv.IsNil() {
|
if rv.IsNil() {
|
||||||
|
|
33
vendor/github.com/gogo/protobuf/protoc-gen-gogo/descriptor/helper.go
generated
vendored
33
vendor/github.com/gogo/protobuf/protoc-gen-gogo/descriptor/helper.go
generated
vendored
|
@ -99,6 +99,17 @@ func (field *FieldDescriptorProto) GetKeyUint64() (x uint64) {
|
||||||
return x
|
return x
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (field *FieldDescriptorProto) GetKey3Uint64() (x uint64) {
|
||||||
|
packed := field.IsPacked3()
|
||||||
|
wireType := field.WireType()
|
||||||
|
fieldNumber := field.GetNumber()
|
||||||
|
if packed {
|
||||||
|
wireType = 2
|
||||||
|
}
|
||||||
|
x = uint64(uint32(fieldNumber)<<3 | uint32(wireType))
|
||||||
|
return x
|
||||||
|
}
|
||||||
|
|
||||||
func (field *FieldDescriptorProto) GetKey() []byte {
|
func (field *FieldDescriptorProto) GetKey() []byte {
|
||||||
x := field.GetKeyUint64()
|
x := field.GetKeyUint64()
|
||||||
i := 0
|
i := 0
|
||||||
|
@ -111,6 +122,18 @@ func (field *FieldDescriptorProto) GetKey() []byte {
|
||||||
return keybuf
|
return keybuf
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (field *FieldDescriptorProto) GetKey3() []byte {
|
||||||
|
x := field.GetKey3Uint64()
|
||||||
|
i := 0
|
||||||
|
keybuf := make([]byte, 0)
|
||||||
|
for i = 0; x > 127; i++ {
|
||||||
|
keybuf = append(keybuf, 0x80|uint8(x&0x7F))
|
||||||
|
x >>= 7
|
||||||
|
}
|
||||||
|
keybuf = append(keybuf, uint8(x))
|
||||||
|
return keybuf
|
||||||
|
}
|
||||||
|
|
||||||
func (desc *FileDescriptorSet) GetField(packageName, messageName, fieldName string) *FieldDescriptorProto {
|
func (desc *FileDescriptorSet) GetField(packageName, messageName, fieldName string) *FieldDescriptorProto {
|
||||||
msg := desc.GetMessage(packageName, messageName)
|
msg := desc.GetMessage(packageName, messageName)
|
||||||
if msg == nil {
|
if msg == nil {
|
||||||
|
@ -352,6 +375,16 @@ func (f *FieldDescriptorProto) IsPacked() bool {
|
||||||
return f.Options != nil && f.GetOptions().GetPacked()
|
return f.Options != nil && f.GetOptions().GetPacked()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (f *FieldDescriptorProto) IsPacked3() bool {
|
||||||
|
if f.IsRepeated() && f.IsScalar() {
|
||||||
|
if f.Options == nil || f.GetOptions().Packed == nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return f.Options != nil && f.GetOptions().GetPacked()
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
func (m *DescriptorProto) HasExtension() bool {
|
func (m *DescriptorProto) HasExtension() bool {
|
||||||
return len(m.ExtensionRange) > 0
|
return len(m.ExtensionRange) > 0
|
||||||
}
|
}
|
||||||
|
|
5
vendor/github.com/golang/protobuf/README.md
generated
vendored
5
vendor/github.com/golang/protobuf/README.md
generated
vendored
|
@ -1,7 +1,5 @@
|
||||||
# Go support for Protocol Buffers
|
# Go support for Protocol Buffers
|
||||||
|
|
||||||
[![Build Status](https://travis-ci.org/golang/protobuf.svg?branch=master)](https://travis-ci.org/golang/protobuf)
|
|
||||||
|
|
||||||
Google's data interchange format.
|
Google's data interchange format.
|
||||||
Copyright 2010 The Go Authors.
|
Copyright 2010 The Go Authors.
|
||||||
https://github.com/golang/protobuf
|
https://github.com/golang/protobuf
|
||||||
|
@ -24,7 +22,7 @@ To use this software, you must:
|
||||||
for details or, if you are using gccgo, follow the instructions at
|
for details or, if you are using gccgo, follow the instructions at
|
||||||
https://golang.org/doc/install/gccgo
|
https://golang.org/doc/install/gccgo
|
||||||
- Grab the code from the repository and install the proto package.
|
- Grab the code from the repository and install the proto package.
|
||||||
The simplest way is to run `go get -u github.com/golang/protobuf/protoc-gen-go`.
|
The simplest way is to run `go get -u github.com/golang/protobuf/{proto,protoc-gen-go}`.
|
||||||
The compiler plugin, protoc-gen-go, will be installed in $GOBIN,
|
The compiler plugin, protoc-gen-go, will be installed in $GOBIN,
|
||||||
defaulting to $GOPATH/bin. It must be in your $PATH for the protocol
|
defaulting to $GOPATH/bin. It must be in your $PATH for the protocol
|
||||||
compiler, protoc, to find it.
|
compiler, protoc, to find it.
|
||||||
|
@ -106,6 +104,7 @@ for a protocol buffer variable v:
|
||||||
When the .proto file specifies `syntax="proto3"`, there are some differences:
|
When the .proto file specifies `syntax="proto3"`, there are some differences:
|
||||||
|
|
||||||
- Non-repeated fields of non-message type are values instead of pointers.
|
- Non-repeated fields of non-message type are values instead of pointers.
|
||||||
|
- Getters are only generated for message and oneof fields.
|
||||||
- Enum types do not get an Enum method.
|
- Enum types do not get an Enum method.
|
||||||
|
|
||||||
Consider file test.proto, containing
|
Consider file test.proto, containing
|
||||||
|
|
15
vendor/github.com/golang/protobuf/proto/encode.go
generated
vendored
15
vendor/github.com/golang/protobuf/proto/encode.go
generated
vendored
|
@ -174,11 +174,11 @@ func sizeFixed32(x uint64) int {
|
||||||
// This is the format used for the sint64 protocol buffer type.
|
// This is the format used for the sint64 protocol buffer type.
|
||||||
func (p *Buffer) EncodeZigzag64(x uint64) error {
|
func (p *Buffer) EncodeZigzag64(x uint64) error {
|
||||||
// use signed number to get arithmetic right shift.
|
// use signed number to get arithmetic right shift.
|
||||||
return p.EncodeVarint((x << 1) ^ uint64((int64(x) >> 63)))
|
return p.EncodeVarint(uint64((x << 1) ^ uint64((int64(x) >> 63))))
|
||||||
}
|
}
|
||||||
|
|
||||||
func sizeZigzag64(x uint64) int {
|
func sizeZigzag64(x uint64) int {
|
||||||
return sizeVarint((x << 1) ^ uint64((int64(x) >> 63)))
|
return sizeVarint(uint64((x << 1) ^ uint64((int64(x) >> 63))))
|
||||||
}
|
}
|
||||||
|
|
||||||
// EncodeZigzag32 writes a zigzag-encoded 32-bit integer
|
// EncodeZigzag32 writes a zigzag-encoded 32-bit integer
|
||||||
|
@ -1075,17 +1075,10 @@ func (o *Buffer) enc_map(p *Properties, base structPointer) error {
|
||||||
|
|
||||||
func (o *Buffer) enc_exts(p *Properties, base structPointer) error {
|
func (o *Buffer) enc_exts(p *Properties, base structPointer) error {
|
||||||
exts := structPointer_Extensions(base, p.field)
|
exts := structPointer_Extensions(base, p.field)
|
||||||
|
if err := encodeExtensions(exts); err != nil {
|
||||||
v, mu := exts.extensionsRead()
|
|
||||||
if v == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
mu.Lock()
|
|
||||||
defer mu.Unlock()
|
|
||||||
if err := encodeExtensionsMap(v); err != nil {
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
v, _ := exts.extensionsRead()
|
||||||
|
|
||||||
return o.enc_map_body(v)
|
return o.enc_map_body(v)
|
||||||
}
|
}
|
||||||
|
|
1
vendor/github.com/golang/protobuf/proto/extensions.go
generated
vendored
1
vendor/github.com/golang/protobuf/proto/extensions.go
generated
vendored
|
@ -154,7 +154,6 @@ type ExtensionDesc struct {
|
||||||
Field int32 // field number
|
Field int32 // field number
|
||||||
Name string // fully-qualified name of extension, for text formatting
|
Name string // fully-qualified name of extension, for text formatting
|
||||||
Tag string // protobuf tag style
|
Tag string // protobuf tag style
|
||||||
Filename string // name of the file in which the extension is defined
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (ed *ExtensionDesc) repeated() bool {
|
func (ed *ExtensionDesc) repeated() bool {
|
||||||
|
|
1
vendor/github.com/golang/protobuf/proto/lib.go
generated
vendored
1
vendor/github.com/golang/protobuf/proto/lib.go
generated
vendored
|
@ -73,6 +73,7 @@ for a protocol buffer variable v:
|
||||||
When the .proto file specifies `syntax="proto3"`, there are some differences:
|
When the .proto file specifies `syntax="proto3"`, there are some differences:
|
||||||
|
|
||||||
- Non-repeated fields of non-message type are values instead of pointers.
|
- Non-repeated fields of non-message type are values instead of pointers.
|
||||||
|
- Getters are only generated for message and oneof fields.
|
||||||
- Enum types do not get an Enum method.
|
- Enum types do not get an Enum method.
|
||||||
|
|
||||||
The simplest way to describe this is to see an example.
|
The simplest way to describe this is to see an example.
|
||||||
|
|
2
vendor/github.com/golang/protobuf/proto/text_parser.go
generated
vendored
2
vendor/github.com/golang/protobuf/proto/text_parser.go
generated
vendored
|
@ -865,7 +865,7 @@ func (p *textParser) readAny(v reflect.Value, props *Properties) error {
|
||||||
return p.readStruct(fv, terminator)
|
return p.readStruct(fv, terminator)
|
||||||
case reflect.Uint32:
|
case reflect.Uint32:
|
||||||
if x, err := strconv.ParseUint(tok.value, 0, 32); err == nil {
|
if x, err := strconv.ParseUint(tok.value, 0, 32); err == nil {
|
||||||
fv.SetUint(x)
|
fv.SetUint(uint64(x))
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
case reflect.Uint64:
|
case reflect.Uint64:
|
||||||
|
|
136
vendor/github.com/golang/protobuf/ptypes/any.go
generated
vendored
Normal file
136
vendor/github.com/golang/protobuf/ptypes/any.go
generated
vendored
Normal file
|
@ -0,0 +1,136 @@
|
||||||
|
// Go support for Protocol Buffers - Google's data interchange format
|
||||||
|
//
|
||||||
|
// Copyright 2016 The Go Authors. All rights reserved.
|
||||||
|
// https://github.com/golang/protobuf
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
package ptypes
|
||||||
|
|
||||||
|
// This file implements functions to marshal proto.Message to/from
|
||||||
|
// google.protobuf.Any message.
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/golang/protobuf/proto"
|
||||||
|
"github.com/golang/protobuf/ptypes/any"
|
||||||
|
)
|
||||||
|
|
||||||
|
const googleApis = "type.googleapis.com/"
|
||||||
|
|
||||||
|
// AnyMessageName returns the name of the message contained in a google.protobuf.Any message.
|
||||||
|
//
|
||||||
|
// Note that regular type assertions should be done using the Is
|
||||||
|
// function. AnyMessageName is provided for less common use cases like filtering a
|
||||||
|
// sequence of Any messages based on a set of allowed message type names.
|
||||||
|
func AnyMessageName(any *any.Any) (string, error) {
|
||||||
|
slash := strings.LastIndex(any.TypeUrl, "/")
|
||||||
|
if slash < 0 {
|
||||||
|
return "", fmt.Errorf("message type url %q is invalid", any.TypeUrl)
|
||||||
|
}
|
||||||
|
return any.TypeUrl[slash+1:], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// MarshalAny takes the protocol buffer and encodes it into google.protobuf.Any.
|
||||||
|
func MarshalAny(pb proto.Message) (*any.Any, error) {
|
||||||
|
value, err := proto.Marshal(pb)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &any.Any{TypeUrl: googleApis + proto.MessageName(pb), Value: value}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// DynamicAny is a value that can be passed to UnmarshalAny to automatically
|
||||||
|
// allocate a proto.Message for the type specified in a google.protobuf.Any
|
||||||
|
// message. The allocated message is stored in the embedded proto.Message.
|
||||||
|
//
|
||||||
|
// Example:
|
||||||
|
//
|
||||||
|
// var x ptypes.DynamicAny
|
||||||
|
// if err := ptypes.UnmarshalAny(a, &x); err != nil { ... }
|
||||||
|
// fmt.Printf("unmarshaled message: %v", x.Message)
|
||||||
|
type DynamicAny struct {
|
||||||
|
proto.Message
|
||||||
|
}
|
||||||
|
|
||||||
|
// Empty returns a new proto.Message of the type specified in a
|
||||||
|
// google.protobuf.Any message. It returns an error if corresponding message
|
||||||
|
// type isn't linked in.
|
||||||
|
func Empty(any *any.Any) (proto.Message, error) {
|
||||||
|
aname, err := AnyMessageName(any)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
t := proto.MessageType(aname)
|
||||||
|
if t == nil {
|
||||||
|
return nil, fmt.Errorf("any: message type %q isn't linked in", aname)
|
||||||
|
}
|
||||||
|
return reflect.New(t.Elem()).Interface().(proto.Message), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UnmarshalAny parses the protocol buffer representation in a google.protobuf.Any
|
||||||
|
// message and places the decoded result in pb. It returns an error if type of
|
||||||
|
// contents of Any message does not match type of pb message.
|
||||||
|
//
|
||||||
|
// pb can be a proto.Message, or a *DynamicAny.
|
||||||
|
func UnmarshalAny(any *any.Any, pb proto.Message) error {
|
||||||
|
if d, ok := pb.(*DynamicAny); ok {
|
||||||
|
if d.Message == nil {
|
||||||
|
var err error
|
||||||
|
d.Message, err = Empty(any)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return UnmarshalAny(any, d.Message)
|
||||||
|
}
|
||||||
|
|
||||||
|
aname, err := AnyMessageName(any)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
mname := proto.MessageName(pb)
|
||||||
|
if aname != mname {
|
||||||
|
return fmt.Errorf("mismatched message type: got %q want %q", aname, mname)
|
||||||
|
}
|
||||||
|
return proto.Unmarshal(any.Value, pb)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Is returns true if any value contains a given message type.
|
||||||
|
func Is(any *any.Any, pb proto.Message) bool {
|
||||||
|
aname, err := AnyMessageName(any)
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
return aname == proto.MessageName(pb)
|
||||||
|
}
|
155
vendor/github.com/golang/protobuf/ptypes/any/any.pb.go
generated
vendored
Normal file
155
vendor/github.com/golang/protobuf/ptypes/any/any.pb.go
generated
vendored
Normal file
|
@ -0,0 +1,155 @@
|
||||||
|
// Code generated by protoc-gen-go.
|
||||||
|
// source: github.com/golang/protobuf/ptypes/any/any.proto
|
||||||
|
// DO NOT EDIT!
|
||||||
|
|
||||||
|
/*
|
||||||
|
Package any is a generated protocol buffer package.
|
||||||
|
|
||||||
|
It is generated from these files:
|
||||||
|
github.com/golang/protobuf/ptypes/any/any.proto
|
||||||
|
|
||||||
|
It has these top-level messages:
|
||||||
|
Any
|
||||||
|
*/
|
||||||
|
package any
|
||||||
|
|
||||||
|
import proto "github.com/golang/protobuf/proto"
|
||||||
|
import fmt "fmt"
|
||||||
|
import math "math"
|
||||||
|
|
||||||
|
// Reference imports to suppress errors if they are not otherwise used.
|
||||||
|
var _ = proto.Marshal
|
||||||
|
var _ = fmt.Errorf
|
||||||
|
var _ = math.Inf
|
||||||
|
|
||||||
|
// This is a compile-time assertion to ensure that this generated file
|
||||||
|
// is compatible with the proto package it is being compiled against.
|
||||||
|
// A compilation error at this line likely means your copy of the
|
||||||
|
// proto package needs to be updated.
|
||||||
|
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||||
|
|
||||||
|
// `Any` contains an arbitrary serialized protocol buffer message along with a
|
||||||
|
// URL that describes the type of the serialized message.
|
||||||
|
//
|
||||||
|
// Protobuf library provides support to pack/unpack Any values in the form
|
||||||
|
// of utility functions or additional generated methods of the Any type.
|
||||||
|
//
|
||||||
|
// Example 1: Pack and unpack a message in C++.
|
||||||
|
//
|
||||||
|
// Foo foo = ...;
|
||||||
|
// Any any;
|
||||||
|
// any.PackFrom(foo);
|
||||||
|
// ...
|
||||||
|
// if (any.UnpackTo(&foo)) {
|
||||||
|
// ...
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Example 2: Pack and unpack a message in Java.
|
||||||
|
//
|
||||||
|
// Foo foo = ...;
|
||||||
|
// Any any = Any.pack(foo);
|
||||||
|
// ...
|
||||||
|
// if (any.is(Foo.class)) {
|
||||||
|
// foo = any.unpack(Foo.class);
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Example 3: Pack and unpack a message in Python.
|
||||||
|
//
|
||||||
|
// foo = Foo(...)
|
||||||
|
// any = Any()
|
||||||
|
// any.Pack(foo)
|
||||||
|
// ...
|
||||||
|
// if any.Is(Foo.DESCRIPTOR):
|
||||||
|
// any.Unpack(foo)
|
||||||
|
// ...
|
||||||
|
//
|
||||||
|
// The pack methods provided by protobuf library will by default use
|
||||||
|
// 'type.googleapis.com/full.type.name' as the type URL and the unpack
|
||||||
|
// methods only use the fully qualified type name after the last '/'
|
||||||
|
// in the type URL, for example "foo.bar.com/x/y.z" will yield type
|
||||||
|
// name "y.z".
|
||||||
|
//
|
||||||
|
//
|
||||||
|
// JSON
|
||||||
|
// ====
|
||||||
|
// The JSON representation of an `Any` value uses the regular
|
||||||
|
// representation of the deserialized, embedded message, with an
|
||||||
|
// additional field `@type` which contains the type URL. Example:
|
||||||
|
//
|
||||||
|
// package google.profile;
|
||||||
|
// message Person {
|
||||||
|
// string first_name = 1;
|
||||||
|
// string last_name = 2;
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// {
|
||||||
|
// "@type": "type.googleapis.com/google.profile.Person",
|
||||||
|
// "firstName": <string>,
|
||||||
|
// "lastName": <string>
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// If the embedded message type is well-known and has a custom JSON
|
||||||
|
// representation, that representation will be embedded adding a field
|
||||||
|
// `value` which holds the custom JSON in addition to the `@type`
|
||||||
|
// field. Example (for message [google.protobuf.Duration][]):
|
||||||
|
//
|
||||||
|
// {
|
||||||
|
// "@type": "type.googleapis.com/google.protobuf.Duration",
|
||||||
|
// "value": "1.212s"
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
type Any struct {
|
||||||
|
// A URL/resource name whose content describes the type of the
|
||||||
|
// serialized protocol buffer message.
|
||||||
|
//
|
||||||
|
// For URLs which use the scheme `http`, `https`, or no scheme, the
|
||||||
|
// following restrictions and interpretations apply:
|
||||||
|
//
|
||||||
|
// * If no scheme is provided, `https` is assumed.
|
||||||
|
// * The last segment of the URL's path must represent the fully
|
||||||
|
// qualified name of the type (as in `path/google.protobuf.Duration`).
|
||||||
|
// The name should be in a canonical form (e.g., leading "." is
|
||||||
|
// not accepted).
|
||||||
|
// * An HTTP GET on the URL must yield a [google.protobuf.Type][]
|
||||||
|
// value in binary format, or produce an error.
|
||||||
|
// * Applications are allowed to cache lookup results based on the
|
||||||
|
// URL, or have them precompiled into a binary to avoid any
|
||||||
|
// lookup. Therefore, binary compatibility needs to be preserved
|
||||||
|
// on changes to types. (Use versioned type names to manage
|
||||||
|
// breaking changes.)
|
||||||
|
//
|
||||||
|
// Schemes other than `http`, `https` (or the empty scheme) might be
|
||||||
|
// used with implementation specific semantics.
|
||||||
|
//
|
||||||
|
TypeUrl string `protobuf:"bytes,1,opt,name=type_url,json=typeUrl" json:"type_url,omitempty"`
|
||||||
|
// Must be a valid serialized protocol buffer of the above specified type.
|
||||||
|
Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Any) Reset() { *m = Any{} }
|
||||||
|
func (m *Any) String() string { return proto.CompactTextString(m) }
|
||||||
|
func (*Any) ProtoMessage() {}
|
||||||
|
func (*Any) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
|
||||||
|
func (*Any) XXX_WellKnownType() string { return "Any" }
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
proto.RegisterType((*Any)(nil), "google.protobuf.Any")
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() { proto.RegisterFile("github.com/golang/protobuf/ptypes/any/any.proto", fileDescriptor0) }
|
||||||
|
|
||||||
|
var fileDescriptor0 = []byte{
|
||||||
|
// 187 bytes of a gzipped FileDescriptorProto
|
||||||
|
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xe2, 0xd2, 0x4f, 0xcf, 0x2c, 0xc9,
|
||||||
|
0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0xd7, 0x2f, 0x28,
|
||||||
|
0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x28, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x4f, 0xcc,
|
||||||
|
0xab, 0x04, 0x61, 0x3d, 0xb0, 0xb8, 0x10, 0x7f, 0x7a, 0x7e, 0x7e, 0x7a, 0x4e, 0xaa, 0x1e, 0x4c,
|
||||||
|
0x95, 0x92, 0x19, 0x17, 0xb3, 0x63, 0x5e, 0xa5, 0x90, 0x24, 0x17, 0x07, 0x48, 0x79, 0x7c, 0x69,
|
||||||
|
0x51, 0x8e, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0x67, 0x10, 0x3b, 0x88, 0x1f, 0x5a, 0x94, 0x23, 0x24,
|
||||||
|
0xc2, 0xc5, 0x5a, 0x96, 0x98, 0x53, 0x9a, 0x2a, 0xc1, 0xa4, 0xc0, 0xa8, 0xc1, 0x13, 0x04, 0xe1,
|
||||||
|
0x38, 0x15, 0x71, 0x09, 0x27, 0xe7, 0xe7, 0xea, 0xa1, 0x19, 0xe7, 0xc4, 0xe1, 0x98, 0x57, 0x19,
|
||||||
|
0x00, 0xe2, 0x04, 0x30, 0x46, 0xa9, 0x12, 0xe5, 0xb8, 0x05, 0x8c, 0x8c, 0x8b, 0x98, 0x98, 0xdd,
|
||||||
|
0x03, 0x9c, 0x56, 0x31, 0xc9, 0xb9, 0x43, 0x4c, 0x0b, 0x80, 0xaa, 0xd2, 0x0b, 0x4f, 0xcd, 0xc9,
|
||||||
|
0xf1, 0xce, 0xcb, 0x2f, 0xcf, 0x0b, 0x01, 0xa9, 0x4e, 0x62, 0x03, 0x6b, 0x37, 0x06, 0x04, 0x00,
|
||||||
|
0x00, 0xff, 0xff, 0xc6, 0x4d, 0x03, 0x23, 0xf6, 0x00, 0x00, 0x00,
|
||||||
|
}
|
140
vendor/github.com/golang/protobuf/ptypes/any/any.proto
generated
vendored
Normal file
140
vendor/github.com/golang/protobuf/ptypes/any/any.proto
generated
vendored
Normal file
|
@ -0,0 +1,140 @@
|
||||||
|
// Protocol Buffers - Google's data interchange format
|
||||||
|
// Copyright 2008 Google Inc. All rights reserved.
|
||||||
|
// https://developers.google.com/protocol-buffers/
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
syntax = "proto3";
|
||||||
|
|
||||||
|
package google.protobuf;
|
||||||
|
|
||||||
|
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||||
|
option go_package = "github.com/golang/protobuf/ptypes/any";
|
||||||
|
option java_package = "com.google.protobuf";
|
||||||
|
option java_outer_classname = "AnyProto";
|
||||||
|
option java_multiple_files = true;
|
||||||
|
option java_generate_equals_and_hash = true;
|
||||||
|
option objc_class_prefix = "GPB";
|
||||||
|
|
||||||
|
// `Any` contains an arbitrary serialized protocol buffer message along with a
|
||||||
|
// URL that describes the type of the serialized message.
|
||||||
|
//
|
||||||
|
// Protobuf library provides support to pack/unpack Any values in the form
|
||||||
|
// of utility functions or additional generated methods of the Any type.
|
||||||
|
//
|
||||||
|
// Example 1: Pack and unpack a message in C++.
|
||||||
|
//
|
||||||
|
// Foo foo = ...;
|
||||||
|
// Any any;
|
||||||
|
// any.PackFrom(foo);
|
||||||
|
// ...
|
||||||
|
// if (any.UnpackTo(&foo)) {
|
||||||
|
// ...
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Example 2: Pack and unpack a message in Java.
|
||||||
|
//
|
||||||
|
// Foo foo = ...;
|
||||||
|
// Any any = Any.pack(foo);
|
||||||
|
// ...
|
||||||
|
// if (any.is(Foo.class)) {
|
||||||
|
// foo = any.unpack(Foo.class);
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Example 3: Pack and unpack a message in Python.
|
||||||
|
//
|
||||||
|
// foo = Foo(...)
|
||||||
|
// any = Any()
|
||||||
|
// any.Pack(foo)
|
||||||
|
// ...
|
||||||
|
// if any.Is(Foo.DESCRIPTOR):
|
||||||
|
// any.Unpack(foo)
|
||||||
|
// ...
|
||||||
|
//
|
||||||
|
// The pack methods provided by protobuf library will by default use
|
||||||
|
// 'type.googleapis.com/full.type.name' as the type URL and the unpack
|
||||||
|
// methods only use the fully qualified type name after the last '/'
|
||||||
|
// in the type URL, for example "foo.bar.com/x/y.z" will yield type
|
||||||
|
// name "y.z".
|
||||||
|
//
|
||||||
|
//
|
||||||
|
// JSON
|
||||||
|
// ====
|
||||||
|
// The JSON representation of an `Any` value uses the regular
|
||||||
|
// representation of the deserialized, embedded message, with an
|
||||||
|
// additional field `@type` which contains the type URL. Example:
|
||||||
|
//
|
||||||
|
// package google.profile;
|
||||||
|
// message Person {
|
||||||
|
// string first_name = 1;
|
||||||
|
// string last_name = 2;
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// {
|
||||||
|
// "@type": "type.googleapis.com/google.profile.Person",
|
||||||
|
// "firstName": <string>,
|
||||||
|
// "lastName": <string>
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// If the embedded message type is well-known and has a custom JSON
|
||||||
|
// representation, that representation will be embedded adding a field
|
||||||
|
// `value` which holds the custom JSON in addition to the `@type`
|
||||||
|
// field. Example (for message [google.protobuf.Duration][]):
|
||||||
|
//
|
||||||
|
// {
|
||||||
|
// "@type": "type.googleapis.com/google.protobuf.Duration",
|
||||||
|
// "value": "1.212s"
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
message Any {
|
||||||
|
// A URL/resource name whose content describes the type of the
|
||||||
|
// serialized protocol buffer message.
|
||||||
|
//
|
||||||
|
// For URLs which use the scheme `http`, `https`, or no scheme, the
|
||||||
|
// following restrictions and interpretations apply:
|
||||||
|
//
|
||||||
|
// * If no scheme is provided, `https` is assumed.
|
||||||
|
// * The last segment of the URL's path must represent the fully
|
||||||
|
// qualified name of the type (as in `path/google.protobuf.Duration`).
|
||||||
|
// The name should be in a canonical form (e.g., leading "." is
|
||||||
|
// not accepted).
|
||||||
|
// * An HTTP GET on the URL must yield a [google.protobuf.Type][]
|
||||||
|
// value in binary format, or produce an error.
|
||||||
|
// * Applications are allowed to cache lookup results based on the
|
||||||
|
// URL, or have them precompiled into a binary to avoid any
|
||||||
|
// lookup. Therefore, binary compatibility needs to be preserved
|
||||||
|
// on changes to types. (Use versioned type names to manage
|
||||||
|
// breaking changes.)
|
||||||
|
//
|
||||||
|
// Schemes other than `http`, `https` (or the empty scheme) might be
|
||||||
|
// used with implementation specific semantics.
|
||||||
|
//
|
||||||
|
string type_url = 1;
|
||||||
|
|
||||||
|
// Must be a valid serialized protocol buffer of the above specified type.
|
||||||
|
bytes value = 2;
|
||||||
|
}
|
35
vendor/github.com/golang/protobuf/ptypes/doc.go
generated
vendored
Normal file
35
vendor/github.com/golang/protobuf/ptypes/doc.go
generated
vendored
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
// Go support for Protocol Buffers - Google's data interchange format
|
||||||
|
//
|
||||||
|
// Copyright 2016 The Go Authors. All rights reserved.
|
||||||
|
// https://github.com/golang/protobuf
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
/*
|
||||||
|
Package ptypes contains code for interacting with well-known types.
|
||||||
|
*/
|
||||||
|
package ptypes
|
102
vendor/github.com/golang/protobuf/ptypes/duration.go
generated
vendored
Normal file
102
vendor/github.com/golang/protobuf/ptypes/duration.go
generated
vendored
Normal file
|
@ -0,0 +1,102 @@
|
||||||
|
// Go support for Protocol Buffers - Google's data interchange format
|
||||||
|
//
|
||||||
|
// Copyright 2016 The Go Authors. All rights reserved.
|
||||||
|
// https://github.com/golang/protobuf
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
package ptypes
|
||||||
|
|
||||||
|
// This file implements conversions between google.protobuf.Duration
|
||||||
|
// and time.Duration.
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
durpb "github.com/golang/protobuf/ptypes/duration"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
// Range of a durpb.Duration in seconds, as specified in
|
||||||
|
// google/protobuf/duration.proto. This is about 10,000 years in seconds.
|
||||||
|
maxSeconds = int64(10000 * 365.25 * 24 * 60 * 60)
|
||||||
|
minSeconds = -maxSeconds
|
||||||
|
)
|
||||||
|
|
||||||
|
// validateDuration determines whether the durpb.Duration is valid according to the
|
||||||
|
// definition in google/protobuf/duration.proto. A valid durpb.Duration
|
||||||
|
// may still be too large to fit into a time.Duration (the range of durpb.Duration
|
||||||
|
// is about 10,000 years, and the range of time.Duration is about 290).
|
||||||
|
func validateDuration(d *durpb.Duration) error {
|
||||||
|
if d == nil {
|
||||||
|
return errors.New("duration: nil Duration")
|
||||||
|
}
|
||||||
|
if d.Seconds < minSeconds || d.Seconds > maxSeconds {
|
||||||
|
return fmt.Errorf("duration: %v: seconds out of range", d)
|
||||||
|
}
|
||||||
|
if d.Nanos <= -1e9 || d.Nanos >= 1e9 {
|
||||||
|
return fmt.Errorf("duration: %v: nanos out of range", d)
|
||||||
|
}
|
||||||
|
// Seconds and Nanos must have the same sign, unless d.Nanos is zero.
|
||||||
|
if (d.Seconds < 0 && d.Nanos > 0) || (d.Seconds > 0 && d.Nanos < 0) {
|
||||||
|
return fmt.Errorf("duration: %v: seconds and nanos have different signs", d)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Duration converts a durpb.Duration to a time.Duration. Duration
|
||||||
|
// returns an error if the durpb.Duration is invalid or is too large to be
|
||||||
|
// represented in a time.Duration.
|
||||||
|
func Duration(p *durpb.Duration) (time.Duration, error) {
|
||||||
|
if err := validateDuration(p); err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
d := time.Duration(p.Seconds) * time.Second
|
||||||
|
if int64(d/time.Second) != p.Seconds {
|
||||||
|
return 0, fmt.Errorf("duration: %v is out of range for time.Duration", p)
|
||||||
|
}
|
||||||
|
if p.Nanos != 0 {
|
||||||
|
d += time.Duration(p.Nanos)
|
||||||
|
if (d < 0) != (p.Nanos < 0) {
|
||||||
|
return 0, fmt.Errorf("duration: %v is out of range for time.Duration", p)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return d, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// DurationProto converts a time.Duration to a durpb.Duration.
|
||||||
|
func DurationProto(d time.Duration) *durpb.Duration {
|
||||||
|
nanos := d.Nanoseconds()
|
||||||
|
secs := nanos / 1e9
|
||||||
|
nanos -= secs * 1e9
|
||||||
|
return &durpb.Duration{
|
||||||
|
Seconds: secs,
|
||||||
|
Nanos: int32(nanos),
|
||||||
|
}
|
||||||
|
}
|
114
vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go
generated
vendored
Normal file
114
vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go
generated
vendored
Normal file
|
@ -0,0 +1,114 @@
|
||||||
|
// Code generated by protoc-gen-go.
|
||||||
|
// source: github.com/golang/protobuf/ptypes/duration/duration.proto
|
||||||
|
// DO NOT EDIT!
|
||||||
|
|
||||||
|
/*
|
||||||
|
Package duration is a generated protocol buffer package.
|
||||||
|
|
||||||
|
It is generated from these files:
|
||||||
|
github.com/golang/protobuf/ptypes/duration/duration.proto
|
||||||
|
|
||||||
|
It has these top-level messages:
|
||||||
|
Duration
|
||||||
|
*/
|
||||||
|
package duration
|
||||||
|
|
||||||
|
import proto "github.com/golang/protobuf/proto"
|
||||||
|
import fmt "fmt"
|
||||||
|
import math "math"
|
||||||
|
|
||||||
|
// Reference imports to suppress errors if they are not otherwise used.
|
||||||
|
var _ = proto.Marshal
|
||||||
|
var _ = fmt.Errorf
|
||||||
|
var _ = math.Inf
|
||||||
|
|
||||||
|
// This is a compile-time assertion to ensure that this generated file
|
||||||
|
// is compatible with the proto package it is being compiled against.
|
||||||
|
// A compilation error at this line likely means your copy of the
|
||||||
|
// proto package needs to be updated.
|
||||||
|
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||||
|
|
||||||
|
// A Duration represents a signed, fixed-length span of time represented
|
||||||
|
// as a count of seconds and fractions of seconds at nanosecond
|
||||||
|
// resolution. It is independent of any calendar and concepts like "day"
|
||||||
|
// or "month". It is related to Timestamp in that the difference between
|
||||||
|
// two Timestamp values is a Duration and it can be added or subtracted
|
||||||
|
// from a Timestamp. Range is approximately +-10,000 years.
|
||||||
|
//
|
||||||
|
// Example 1: Compute Duration from two Timestamps in pseudo code.
|
||||||
|
//
|
||||||
|
// Timestamp start = ...;
|
||||||
|
// Timestamp end = ...;
|
||||||
|
// Duration duration = ...;
|
||||||
|
//
|
||||||
|
// duration.seconds = end.seconds - start.seconds;
|
||||||
|
// duration.nanos = end.nanos - start.nanos;
|
||||||
|
//
|
||||||
|
// if (duration.seconds < 0 && duration.nanos > 0) {
|
||||||
|
// duration.seconds += 1;
|
||||||
|
// duration.nanos -= 1000000000;
|
||||||
|
// } else if (durations.seconds > 0 && duration.nanos < 0) {
|
||||||
|
// duration.seconds -= 1;
|
||||||
|
// duration.nanos += 1000000000;
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
|
||||||
|
//
|
||||||
|
// Timestamp start = ...;
|
||||||
|
// Duration duration = ...;
|
||||||
|
// Timestamp end = ...;
|
||||||
|
//
|
||||||
|
// end.seconds = start.seconds + duration.seconds;
|
||||||
|
// end.nanos = start.nanos + duration.nanos;
|
||||||
|
//
|
||||||
|
// if (end.nanos < 0) {
|
||||||
|
// end.seconds -= 1;
|
||||||
|
// end.nanos += 1000000000;
|
||||||
|
// } else if (end.nanos >= 1000000000) {
|
||||||
|
// end.seconds += 1;
|
||||||
|
// end.nanos -= 1000000000;
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
//
|
||||||
|
type Duration struct {
|
||||||
|
// Signed seconds of the span of time. Must be from -315,576,000,000
|
||||||
|
// to +315,576,000,000 inclusive.
|
||||||
|
Seconds int64 `protobuf:"varint,1,opt,name=seconds" json:"seconds,omitempty"`
|
||||||
|
// Signed fractions of a second at nanosecond resolution of the span
|
||||||
|
// of time. Durations less than one second are represented with a 0
|
||||||
|
// `seconds` field and a positive or negative `nanos` field. For durations
|
||||||
|
// of one second or more, a non-zero value for the `nanos` field must be
|
||||||
|
// of the same sign as the `seconds` field. Must be from -999,999,999
|
||||||
|
// to +999,999,999 inclusive.
|
||||||
|
Nanos int32 `protobuf:"varint,2,opt,name=nanos" json:"nanos,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Duration) Reset() { *m = Duration{} }
|
||||||
|
func (m *Duration) String() string { return proto.CompactTextString(m) }
|
||||||
|
func (*Duration) ProtoMessage() {}
|
||||||
|
func (*Duration) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
|
||||||
|
func (*Duration) XXX_WellKnownType() string { return "Duration" }
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
proto.RegisterType((*Duration)(nil), "google.protobuf.Duration")
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
proto.RegisterFile("github.com/golang/protobuf/ptypes/duration/duration.proto", fileDescriptor0)
|
||||||
|
}
|
||||||
|
|
||||||
|
var fileDescriptor0 = []byte{
|
||||||
|
// 189 bytes of a gzipped FileDescriptorProto
|
||||||
|
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xe2, 0xb2, 0x4c, 0xcf, 0x2c, 0xc9,
|
||||||
|
0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0xd7, 0x2f, 0x28,
|
||||||
|
0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x28, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x4f, 0x29,
|
||||||
|
0x2d, 0x4a, 0x2c, 0xc9, 0xcc, 0xcf, 0x83, 0x33, 0xf4, 0xc0, 0x2a, 0x84, 0xf8, 0xd3, 0xf3, 0xf3,
|
||||||
|
0xd3, 0x73, 0x52, 0xf5, 0x60, 0xea, 0x95, 0xac, 0xb8, 0x38, 0x5c, 0xa0, 0x4a, 0x84, 0x24, 0xb8,
|
||||||
|
0xd8, 0x8b, 0x53, 0x93, 0xf3, 0xf3, 0x52, 0x8a, 0x25, 0x18, 0x15, 0x18, 0x35, 0x98, 0x83, 0x60,
|
||||||
|
0x5c, 0x21, 0x11, 0x2e, 0xd6, 0xbc, 0xc4, 0xbc, 0xfc, 0x62, 0x09, 0x26, 0x05, 0x46, 0x0d, 0xd6,
|
||||||
|
0x20, 0x08, 0xc7, 0xa9, 0x86, 0x4b, 0x38, 0x39, 0x3f, 0x57, 0x0f, 0xcd, 0x48, 0x27, 0x5e, 0x98,
|
||||||
|
0x81, 0x01, 0x20, 0x91, 0x00, 0xc6, 0x28, 0x2d, 0xe2, 0xdd, 0xbb, 0x80, 0x91, 0x71, 0x11, 0x13,
|
||||||
|
0xb3, 0x7b, 0x80, 0xd3, 0x2a, 0x26, 0x39, 0x77, 0x88, 0xb9, 0x01, 0x50, 0xa5, 0x7a, 0xe1, 0xa9,
|
||||||
|
0x39, 0x39, 0xde, 0x79, 0xf9, 0xe5, 0x79, 0x21, 0x20, 0x2d, 0x49, 0x6c, 0x60, 0x33, 0x8c, 0x01,
|
||||||
|
0x01, 0x00, 0x00, 0xff, 0xff, 0x62, 0xfb, 0xb1, 0x51, 0x0e, 0x01, 0x00, 0x00,
|
||||||
|
}
|
98
vendor/github.com/golang/protobuf/ptypes/duration/duration.proto
generated
vendored
Normal file
98
vendor/github.com/golang/protobuf/ptypes/duration/duration.proto
generated
vendored
Normal file
|
@ -0,0 +1,98 @@
|
||||||
|
// Protocol Buffers - Google's data interchange format
|
||||||
|
// Copyright 2008 Google Inc. All rights reserved.
|
||||||
|
// https://developers.google.com/protocol-buffers/
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
syntax = "proto3";
|
||||||
|
|
||||||
|
package google.protobuf;
|
||||||
|
|
||||||
|
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||||
|
option go_package = "github.com/golang/protobuf/ptypes/duration";
|
||||||
|
option java_package = "com.google.protobuf";
|
||||||
|
option java_outer_classname = "DurationProto";
|
||||||
|
option java_multiple_files = true;
|
||||||
|
option java_generate_equals_and_hash = true;
|
||||||
|
option objc_class_prefix = "GPB";
|
||||||
|
|
||||||
|
// A Duration represents a signed, fixed-length span of time represented
|
||||||
|
// as a count of seconds and fractions of seconds at nanosecond
|
||||||
|
// resolution. It is independent of any calendar and concepts like "day"
|
||||||
|
// or "month". It is related to Timestamp in that the difference between
|
||||||
|
// two Timestamp values is a Duration and it can be added or subtracted
|
||||||
|
// from a Timestamp. Range is approximately +-10,000 years.
|
||||||
|
//
|
||||||
|
// Example 1: Compute Duration from two Timestamps in pseudo code.
|
||||||
|
//
|
||||||
|
// Timestamp start = ...;
|
||||||
|
// Timestamp end = ...;
|
||||||
|
// Duration duration = ...;
|
||||||
|
//
|
||||||
|
// duration.seconds = end.seconds - start.seconds;
|
||||||
|
// duration.nanos = end.nanos - start.nanos;
|
||||||
|
//
|
||||||
|
// if (duration.seconds < 0 && duration.nanos > 0) {
|
||||||
|
// duration.seconds += 1;
|
||||||
|
// duration.nanos -= 1000000000;
|
||||||
|
// } else if (durations.seconds > 0 && duration.nanos < 0) {
|
||||||
|
// duration.seconds -= 1;
|
||||||
|
// duration.nanos += 1000000000;
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
|
||||||
|
//
|
||||||
|
// Timestamp start = ...;
|
||||||
|
// Duration duration = ...;
|
||||||
|
// Timestamp end = ...;
|
||||||
|
//
|
||||||
|
// end.seconds = start.seconds + duration.seconds;
|
||||||
|
// end.nanos = start.nanos + duration.nanos;
|
||||||
|
//
|
||||||
|
// if (end.nanos < 0) {
|
||||||
|
// end.seconds -= 1;
|
||||||
|
// end.nanos += 1000000000;
|
||||||
|
// } else if (end.nanos >= 1000000000) {
|
||||||
|
// end.seconds += 1;
|
||||||
|
// end.nanos -= 1000000000;
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
//
|
||||||
|
message Duration {
|
||||||
|
|
||||||
|
// Signed seconds of the span of time. Must be from -315,576,000,000
|
||||||
|
// to +315,576,000,000 inclusive.
|
||||||
|
int64 seconds = 1;
|
||||||
|
|
||||||
|
// Signed fractions of a second at nanosecond resolution of the span
|
||||||
|
// of time. Durations less than one second are represented with a 0
|
||||||
|
// `seconds` field and a positive or negative `nanos` field. For durations
|
||||||
|
// of one second or more, a non-zero value for the `nanos` field must be
|
||||||
|
// of the same sign as the `seconds` field. Must be from -999,999,999
|
||||||
|
// to +999,999,999 inclusive.
|
||||||
|
int32 nanos = 2;
|
||||||
|
}
|
125
vendor/github.com/golang/protobuf/ptypes/timestamp.go
generated
vendored
Normal file
125
vendor/github.com/golang/protobuf/ptypes/timestamp.go
generated
vendored
Normal file
|
@ -0,0 +1,125 @@
|
||||||
|
// Go support for Protocol Buffers - Google's data interchange format
|
||||||
|
//
|
||||||
|
// Copyright 2016 The Go Authors. All rights reserved.
|
||||||
|
// https://github.com/golang/protobuf
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
package ptypes
|
||||||
|
|
||||||
|
// This file implements operations on google.protobuf.Timestamp.
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
tspb "github.com/golang/protobuf/ptypes/timestamp"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
// Seconds field of the earliest valid Timestamp.
|
||||||
|
// This is time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC).Unix().
|
||||||
|
minValidSeconds = -62135596800
|
||||||
|
// Seconds field just after the latest valid Timestamp.
|
||||||
|
// This is time.Date(10000, 1, 1, 0, 0, 0, 0, time.UTC).Unix().
|
||||||
|
maxValidSeconds = 253402300800
|
||||||
|
)
|
||||||
|
|
||||||
|
// validateTimestamp determines whether a Timestamp is valid.
|
||||||
|
// A valid timestamp represents a time in the range
|
||||||
|
// [0001-01-01, 10000-01-01) and has a Nanos field
|
||||||
|
// in the range [0, 1e9).
|
||||||
|
//
|
||||||
|
// If the Timestamp is valid, validateTimestamp returns nil.
|
||||||
|
// Otherwise, it returns an error that describes
|
||||||
|
// the problem.
|
||||||
|
//
|
||||||
|
// Every valid Timestamp can be represented by a time.Time, but the converse is not true.
|
||||||
|
func validateTimestamp(ts *tspb.Timestamp) error {
|
||||||
|
if ts == nil {
|
||||||
|
return errors.New("timestamp: nil Timestamp")
|
||||||
|
}
|
||||||
|
if ts.Seconds < minValidSeconds {
|
||||||
|
return fmt.Errorf("timestamp: %v before 0001-01-01", ts)
|
||||||
|
}
|
||||||
|
if ts.Seconds >= maxValidSeconds {
|
||||||
|
return fmt.Errorf("timestamp: %v after 10000-01-01", ts)
|
||||||
|
}
|
||||||
|
if ts.Nanos < 0 || ts.Nanos >= 1e9 {
|
||||||
|
return fmt.Errorf("timestamp: %v: nanos not in range [0, 1e9)", ts)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Timestamp converts a google.protobuf.Timestamp proto to a time.Time.
|
||||||
|
// It returns an error if the argument is invalid.
|
||||||
|
//
|
||||||
|
// Unlike most Go functions, if Timestamp returns an error, the first return value
|
||||||
|
// is not the zero time.Time. Instead, it is the value obtained from the
|
||||||
|
// time.Unix function when passed the contents of the Timestamp, in the UTC
|
||||||
|
// locale. This may or may not be a meaningful time; many invalid Timestamps
|
||||||
|
// do map to valid time.Times.
|
||||||
|
//
|
||||||
|
// A nil Timestamp returns an error. The first return value in that case is
|
||||||
|
// undefined.
|
||||||
|
func Timestamp(ts *tspb.Timestamp) (time.Time, error) {
|
||||||
|
// Don't return the zero value on error, because corresponds to a valid
|
||||||
|
// timestamp. Instead return whatever time.Unix gives us.
|
||||||
|
var t time.Time
|
||||||
|
if ts == nil {
|
||||||
|
t = time.Unix(0, 0).UTC() // treat nil like the empty Timestamp
|
||||||
|
} else {
|
||||||
|
t = time.Unix(ts.Seconds, int64(ts.Nanos)).UTC()
|
||||||
|
}
|
||||||
|
return t, validateTimestamp(ts)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TimestampProto converts the time.Time to a google.protobuf.Timestamp proto.
|
||||||
|
// It returns an error if the resulting Timestamp is invalid.
|
||||||
|
func TimestampProto(t time.Time) (*tspb.Timestamp, error) {
|
||||||
|
seconds := t.Unix()
|
||||||
|
nanos := int32(t.Sub(time.Unix(seconds, 0)))
|
||||||
|
ts := &tspb.Timestamp{
|
||||||
|
Seconds: seconds,
|
||||||
|
Nanos: nanos,
|
||||||
|
}
|
||||||
|
if err := validateTimestamp(ts); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return ts, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// TimestampString returns the RFC 3339 string for valid Timestamps. For invalid
|
||||||
|
// Timestamps, it returns an error message in parentheses.
|
||||||
|
func TimestampString(ts *tspb.Timestamp) string {
|
||||||
|
t, err := Timestamp(ts)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Sprintf("(%v)", err)
|
||||||
|
}
|
||||||
|
return t.Format(time.RFC3339Nano)
|
||||||
|
}
|
127
vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go
generated
vendored
Normal file
127
vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go
generated
vendored
Normal file
|
@ -0,0 +1,127 @@
|
||||||
|
// Code generated by protoc-gen-go.
|
||||||
|
// source: github.com/golang/protobuf/ptypes/timestamp/timestamp.proto
|
||||||
|
// DO NOT EDIT!
|
||||||
|
|
||||||
|
/*
|
||||||
|
Package timestamp is a generated protocol buffer package.
|
||||||
|
|
||||||
|
It is generated from these files:
|
||||||
|
github.com/golang/protobuf/ptypes/timestamp/timestamp.proto
|
||||||
|
|
||||||
|
It has these top-level messages:
|
||||||
|
Timestamp
|
||||||
|
*/
|
||||||
|
package timestamp
|
||||||
|
|
||||||
|
import proto "github.com/golang/protobuf/proto"
|
||||||
|
import fmt "fmt"
|
||||||
|
import math "math"
|
||||||
|
|
||||||
|
// Reference imports to suppress errors if they are not otherwise used.
|
||||||
|
var _ = proto.Marshal
|
||||||
|
var _ = fmt.Errorf
|
||||||
|
var _ = math.Inf
|
||||||
|
|
||||||
|
// This is a compile-time assertion to ensure that this generated file
|
||||||
|
// is compatible with the proto package it is being compiled against.
|
||||||
|
// A compilation error at this line likely means your copy of the
|
||||||
|
// proto package needs to be updated.
|
||||||
|
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||||
|
|
||||||
|
// A Timestamp represents a point in time independent of any time zone
|
||||||
|
// or calendar, represented as seconds and fractions of seconds at
|
||||||
|
// nanosecond resolution in UTC Epoch time. It is encoded using the
|
||||||
|
// Proleptic Gregorian Calendar which extends the Gregorian calendar
|
||||||
|
// backwards to year one. It is encoded assuming all minutes are 60
|
||||||
|
// seconds long, i.e. leap seconds are "smeared" so that no leap second
|
||||||
|
// table is needed for interpretation. Range is from
|
||||||
|
// 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z.
|
||||||
|
// By restricting to that range, we ensure that we can convert to
|
||||||
|
// and from RFC 3339 date strings.
|
||||||
|
// See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt).
|
||||||
|
//
|
||||||
|
// Example 1: Compute Timestamp from POSIX `time()`.
|
||||||
|
//
|
||||||
|
// Timestamp timestamp;
|
||||||
|
// timestamp.set_seconds(time(NULL));
|
||||||
|
// timestamp.set_nanos(0);
|
||||||
|
//
|
||||||
|
// Example 2: Compute Timestamp from POSIX `gettimeofday()`.
|
||||||
|
//
|
||||||
|
// struct timeval tv;
|
||||||
|
// gettimeofday(&tv, NULL);
|
||||||
|
//
|
||||||
|
// Timestamp timestamp;
|
||||||
|
// timestamp.set_seconds(tv.tv_sec);
|
||||||
|
// timestamp.set_nanos(tv.tv_usec * 1000);
|
||||||
|
//
|
||||||
|
// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
|
||||||
|
//
|
||||||
|
// FILETIME ft;
|
||||||
|
// GetSystemTimeAsFileTime(&ft);
|
||||||
|
// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
|
||||||
|
//
|
||||||
|
// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
|
||||||
|
// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
|
||||||
|
// Timestamp timestamp;
|
||||||
|
// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
|
||||||
|
// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
|
||||||
|
//
|
||||||
|
// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
|
||||||
|
//
|
||||||
|
// long millis = System.currentTimeMillis();
|
||||||
|
//
|
||||||
|
// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
|
||||||
|
// .setNanos((int) ((millis % 1000) * 1000000)).build();
|
||||||
|
//
|
||||||
|
//
|
||||||
|
// Example 5: Compute Timestamp from current time in Python.
|
||||||
|
//
|
||||||
|
// now = time.time()
|
||||||
|
// seconds = int(now)
|
||||||
|
// nanos = int((now - seconds) * 10**9)
|
||||||
|
// timestamp = Timestamp(seconds=seconds, nanos=nanos)
|
||||||
|
//
|
||||||
|
//
|
||||||
|
type Timestamp struct {
|
||||||
|
// Represents seconds of UTC time since Unix epoch
|
||||||
|
// 1970-01-01T00:00:00Z. Must be from from 0001-01-01T00:00:00Z to
|
||||||
|
// 9999-12-31T23:59:59Z inclusive.
|
||||||
|
Seconds int64 `protobuf:"varint,1,opt,name=seconds" json:"seconds,omitempty"`
|
||||||
|
// Non-negative fractions of a second at nanosecond resolution. Negative
|
||||||
|
// second values with fractions must still have non-negative nanos values
|
||||||
|
// that count forward in time. Must be from 0 to 999,999,999
|
||||||
|
// inclusive.
|
||||||
|
Nanos int32 `protobuf:"varint,2,opt,name=nanos" json:"nanos,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *Timestamp) Reset() { *m = Timestamp{} }
|
||||||
|
func (m *Timestamp) String() string { return proto.CompactTextString(m) }
|
||||||
|
func (*Timestamp) ProtoMessage() {}
|
||||||
|
func (*Timestamp) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
|
||||||
|
func (*Timestamp) XXX_WellKnownType() string { return "Timestamp" }
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
proto.RegisterType((*Timestamp)(nil), "google.protobuf.Timestamp")
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
proto.RegisterFile("github.com/golang/protobuf/ptypes/timestamp/timestamp.proto", fileDescriptor0)
|
||||||
|
}
|
||||||
|
|
||||||
|
var fileDescriptor0 = []byte{
|
||||||
|
// 194 bytes of a gzipped FileDescriptorProto
|
||||||
|
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xe2, 0xb2, 0x4e, 0xcf, 0x2c, 0xc9,
|
||||||
|
0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, 0x4b, 0xd7, 0x2f, 0x28,
|
||||||
|
0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x28, 0xa9, 0x2c, 0x48, 0x2d, 0xd6, 0x2f, 0xc9,
|
||||||
|
0xcc, 0x4d, 0x2d, 0x2e, 0x49, 0xcc, 0x2d, 0x40, 0xb0, 0xf4, 0xc0, 0x6a, 0x84, 0xf8, 0xd3, 0xf3,
|
||||||
|
0xf3, 0xd3, 0x73, 0x52, 0xf5, 0x60, 0x3a, 0x94, 0xac, 0xb9, 0x38, 0x43, 0x60, 0x6a, 0x84, 0x24,
|
||||||
|
0xb8, 0xd8, 0x8b, 0x53, 0x93, 0xf3, 0xf3, 0x52, 0x8a, 0x25, 0x18, 0x15, 0x18, 0x35, 0x98, 0x83,
|
||||||
|
0x60, 0x5c, 0x21, 0x11, 0x2e, 0xd6, 0xbc, 0xc4, 0xbc, 0xfc, 0x62, 0x09, 0x26, 0x05, 0x46, 0x0d,
|
||||||
|
0xd6, 0x20, 0x08, 0xc7, 0xa9, 0x91, 0x91, 0x4b, 0x38, 0x39, 0x3f, 0x57, 0x0f, 0xcd, 0x50, 0x27,
|
||||||
|
0x3e, 0xb8, 0x91, 0x01, 0x20, 0xa1, 0x00, 0xc6, 0x28, 0x6d, 0x12, 0x1c, 0xbd, 0x80, 0x91, 0xf1,
|
||||||
|
0x07, 0x23, 0xe3, 0x22, 0x26, 0x66, 0xf7, 0x00, 0xa7, 0x55, 0x4c, 0x72, 0xee, 0x10, 0xc3, 0x03,
|
||||||
|
0xa0, 0xca, 0xf5, 0xc2, 0x53, 0x73, 0x72, 0xbc, 0xf3, 0xf2, 0xcb, 0xf3, 0x42, 0x40, 0xda, 0x92,
|
||||||
|
0xd8, 0xc0, 0xe6, 0x18, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, 0x17, 0x5f, 0xb7, 0xdc, 0x17, 0x01,
|
||||||
|
0x00, 0x00,
|
||||||
|
}
111 vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.proto generated vendored Normal file
@@ -0,0 +1,111 @@
|
||||||
|
// Protocol Buffers - Google's data interchange format
|
||||||
|
// Copyright 2008 Google Inc. All rights reserved.
|
||||||
|
// https://developers.google.com/protocol-buffers/
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
syntax = "proto3";
|
||||||
|
|
||||||
|
package google.protobuf;
|
||||||
|
|
||||||
|
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
|
||||||
|
option cc_enable_arenas = true;
|
||||||
|
option go_package = "github.com/golang/protobuf/ptypes/timestamp";
|
||||||
|
option java_package = "com.google.protobuf";
|
||||||
|
option java_outer_classname = "TimestampProto";
|
||||||
|
option java_multiple_files = true;
|
||||||
|
option java_generate_equals_and_hash = true;
|
||||||
|
option objc_class_prefix = "GPB";
|
||||||
|
|
||||||
|
// A Timestamp represents a point in time independent of any time zone
|
||||||
|
// or calendar, represented as seconds and fractions of seconds at
|
||||||
|
// nanosecond resolution in UTC Epoch time. It is encoded using the
|
||||||
|
// Proleptic Gregorian Calendar which extends the Gregorian calendar
|
||||||
|
// backwards to year one. It is encoded assuming all minutes are 60
|
||||||
|
// seconds long, i.e. leap seconds are "smeared" so that no leap second
|
||||||
|
// table is needed for interpretation. Range is from
|
||||||
|
// 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z.
|
||||||
|
// By restricting to that range, we ensure that we can convert to
|
||||||
|
// and from RFC 3339 date strings.
|
||||||
|
// See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt).
|
||||||
|
//
|
||||||
|
// Example 1: Compute Timestamp from POSIX `time()`.
|
||||||
|
//
|
||||||
|
// Timestamp timestamp;
|
||||||
|
// timestamp.set_seconds(time(NULL));
|
||||||
|
// timestamp.set_nanos(0);
|
||||||
|
//
|
||||||
|
// Example 2: Compute Timestamp from POSIX `gettimeofday()`.
|
||||||
|
//
|
||||||
|
// struct timeval tv;
|
||||||
|
// gettimeofday(&tv, NULL);
|
||||||
|
//
|
||||||
|
// Timestamp timestamp;
|
||||||
|
// timestamp.set_seconds(tv.tv_sec);
|
||||||
|
// timestamp.set_nanos(tv.tv_usec * 1000);
|
||||||
|
//
|
||||||
|
// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
|
||||||
|
//
|
||||||
|
// FILETIME ft;
|
||||||
|
// GetSystemTimeAsFileTime(&ft);
|
||||||
|
// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
|
||||||
|
//
|
||||||
|
// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
|
||||||
|
// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
|
||||||
|
// Timestamp timestamp;
|
||||||
|
// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
|
||||||
|
// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
|
||||||
|
//
|
||||||
|
// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
|
||||||
|
//
|
||||||
|
// long millis = System.currentTimeMillis();
|
||||||
|
//
|
||||||
|
// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
|
||||||
|
// .setNanos((int) ((millis % 1000) * 1000000)).build();
|
||||||
|
//
|
||||||
|
//
|
||||||
|
// Example 5: Compute Timestamp from current time in Python.
|
||||||
|
//
|
||||||
|
// now = time.time()
|
||||||
|
// seconds = int(now)
|
||||||
|
// nanos = int((now - seconds) * 10**9)
|
||||||
|
// timestamp = Timestamp(seconds=seconds, nanos=nanos)
|
||||||
|
//
|
||||||
|
//
|
||||||
|
message Timestamp {
|
||||||
|
|
||||||
|
// Represents seconds of UTC time since Unix epoch
|
||||||
|
// 1970-01-01T00:00:00Z. Must be from from 0001-01-01T00:00:00Z to
|
||||||
|
// 9999-12-31T23:59:59Z inclusive.
|
||||||
|
int64 seconds = 1;
|
||||||
|
|
||||||
|
// Non-negative fractions of a second at nanosecond resolution. Negative
|
||||||
|
// second values with fractions must still have non-negative nanos values
|
||||||
|
// that count forward in time. Must be from 0 to 999,999,999
|
||||||
|
// inclusive.
|
||||||
|
int32 nanos = 2;
|
||||||
|
}
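The nanos rule in this message is the part that trips up hand-rolled conversions for instants before the epoch: seconds must round down and nanos must stay in [0, 999,999,999]. A small Go sketch of that split (it mirrors what TimestampProto above already does), using the generated package named by the go_package option and the Seconds/Nanos fields shown in timestamp.pb.go:

package main

import (
    "fmt"
    "time"

    tspb "github.com/golang/protobuf/ptypes/timestamp"
)

// toTimestamp splits a time.Time into (seconds, nanos) so that nanos is
// always non-negative, even for instants before 1970-01-01T00:00:00Z.
func toTimestamp(t time.Time) *tspb.Timestamp {
    seconds := t.Unix()                          // floor division, rounds toward minus infinity
    nanos := int32(t.Sub(time.Unix(seconds, 0))) // remainder is therefore in [0, 1e9)
    return &tspb.Timestamp{Seconds: seconds, Nanos: nanos}
}

func main() {
    before := time.Date(1969, 12, 31, 23, 59, 59, 250000000, time.UTC)
    ts := toTimestamp(before)
    fmt.Println(ts.Seconds, ts.Nanos) // -1 250000000, not 0 -750000000
}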
12 vendor/github.com/google/btree/README.md generated vendored Normal file
@@ -0,0 +1,12 @@
# BTree implementation for Go

![Travis CI Build Status](https://api.travis-ci.org/google/btree.svg?branch=master)

This package provides an in-memory B-Tree implementation for Go, useful as
an ordered, mutable data structure.

The API is based off of the wonderful
http://godoc.org/github.com/petar/GoLLRB/llrb, and is meant to allow btree to
act as a drop-in replacement for gollrb trees.

See http://godoc.org/github.com/google/btree for documentation.
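For a quick sense of the API that the vendored code below exposes, here is a small sketch using only identifiers defined in btree.go (New, ReplaceOrInsert, Get, Has, Len, AscendRange and the built-in Int item type); it is illustrative usage, not part of the package:

package main

import (
    "fmt"

    "github.com/google/btree"
)

func main() {
    tr := btree.New(32) // degree 32: up to 63 items and 64 children per node

    for i := 0; i < 10; i++ {
        tr.ReplaceOrInsert(btree.Int(i)) // returns the old item if one was replaced
    }

    fmt.Println(tr.Len())               // 10
    fmt.Println(tr.Has(btree.Int(3)))   // true
    fmt.Println(tr.Get(btree.Int(100))) // <nil>

    // In-order iteration over [3, 7); returning false from the callback stops early.
    tr.AscendRange(btree.Int(3), btree.Int(7), func(i btree.Item) bool {
        fmt.Println(int(i.(btree.Int)))
        return true
    })
}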
649 vendor/github.com/google/btree/btree.go generated vendored Normal file
@@ -0,0 +1,649 @@
|
||||||
|
// Copyright 2014 Google Inc.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
// Package btree implements in-memory B-Trees of arbitrary degree.
|
||||||
|
//
|
||||||
|
// btree implements an in-memory B-Tree for use as an ordered data structure.
|
||||||
|
// It is not meant for persistent storage solutions.
|
||||||
|
//
|
||||||
|
// It has a flatter structure than an equivalent red-black or other binary tree,
|
||||||
|
// which in some cases yields better memory usage and/or performance.
|
||||||
|
// See some discussion on the matter here:
|
||||||
|
// http://google-opensource.blogspot.com/2013/01/c-containers-that-save-memory-and-time.html
|
||||||
|
// Note, though, that this project is in no way related to the C++ B-Tree
|
||||||
|
// implmentation written about there.
|
||||||
|
//
|
||||||
|
// Within this tree, each node contains a slice of items and a (possibly nil)
|
||||||
|
// slice of children. For basic numeric values or raw structs, this can cause
|
||||||
|
// efficiency differences when compared to equivalent C++ template code that
|
||||||
|
// stores values in arrays within the node:
|
||||||
|
// * Due to the overhead of storing values as interfaces (each
|
||||||
|
// value needs to be stored as the value itself, then 2 words for the
|
||||||
|
// interface pointing to that value and its type), resulting in higher
|
||||||
|
// memory use.
|
||||||
|
// * Since interfaces can point to values anywhere in memory, values are
|
||||||
|
// most likely not stored in contiguous blocks, resulting in a higher
|
||||||
|
// number of cache misses.
|
||||||
|
// These issues don't tend to matter, though, when working with strings or other
|
||||||
|
// heap-allocated structures, since C++-equivalent structures also must store
|
||||||
|
// pointers and also distribute their values across the heap.
|
||||||
|
//
|
||||||
|
// This implementation is designed to be a drop-in replacement to gollrb.LLRB
|
||||||
|
// trees, (http://github.com/petar/gollrb), an excellent and probably the most
|
||||||
|
// widely used ordered tree implementation in the Go ecosystem currently.
|
||||||
|
// Its functions, therefore, exactly mirror those of
|
||||||
|
// llrb.LLRB where possible. Unlike gollrb, though, we currently don't
|
||||||
|
// support storing multiple equivalent values or backwards iteration.
|
||||||
|
package btree
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Item represents a single object in the tree.
|
||||||
|
type Item interface {
|
||||||
|
// Less tests whether the current item is less than the given argument.
|
||||||
|
//
|
||||||
|
// This must provide a strict weak ordering.
|
||||||
|
// If !a.Less(b) && !b.Less(a), we treat this to mean a == b (i.e. we can only
|
||||||
|
// hold one of either a or b in the tree).
|
||||||
|
Less(than Item) bool
|
||||||
|
}
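Anything stored in the tree only has to satisfy the Less contract spelled out above. A caller-defined key type might look like the following sketch (the byUser type and its ordering are illustrative, not part of the package):

package main

import "github.com/google/btree"

// byUser orders items by Name; ties on Name are treated as equal by the
// tree, so a second insert with the same Name replaces the first.
type byUser struct {
    Name  string
    Score int
}

func (a byUser) Less(than btree.Item) bool {
    return a.Name < than.(byUser).Name
}

func main() {
    tr := btree.New(2)
    tr.ReplaceOrInsert(byUser{Name: "ada", Score: 1})
    tr.ReplaceOrInsert(byUser{Name: "ada", Score: 2}) // replaces the first "ada"
    _ = tr.Len()                                      // 1
}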
|
||||||
|
|
||||||
|
const (
|
||||||
|
DefaultFreeListSize = 32
|
||||||
|
)
|
||||||
|
|
||||||
|
// FreeList represents a free list of btree nodes. By default each
|
||||||
|
// BTree has its own FreeList, but multiple BTrees can share the same
|
||||||
|
// FreeList.
|
||||||
|
// Two Btrees using the same freelist are not safe for concurrent write access.
|
||||||
|
type FreeList struct {
|
||||||
|
freelist []*node
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewFreeList creates a new free list.
|
||||||
|
// size is the maximum size of the returned free list.
|
||||||
|
func NewFreeList(size int) *FreeList {
|
||||||
|
return &FreeList{freelist: make([]*node, 0, size)}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *FreeList) newNode() (n *node) {
|
||||||
|
index := len(f.freelist) - 1
|
||||||
|
if index < 0 {
|
||||||
|
return new(node)
|
||||||
|
}
|
||||||
|
f.freelist, n = f.freelist[:index], f.freelist[index]
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *FreeList) freeNode(n *node) {
|
||||||
|
if len(f.freelist) < cap(f.freelist) {
|
||||||
|
f.freelist = append(f.freelist, n)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ItemIterator allows callers of Ascend* to iterate in-order over portions of
|
||||||
|
// the tree. When this function returns false, iteration will stop and the
|
||||||
|
// associated Ascend* function will immediately return.
|
||||||
|
type ItemIterator func(i Item) bool
|
||||||
|
|
||||||
|
// New creates a new B-Tree with the given degree.
|
||||||
|
//
|
||||||
|
// New(2), for example, will create a 2-3-4 tree (each node contains 1-3 items
|
||||||
|
// and 2-4 children).
|
||||||
|
func New(degree int) *BTree {
|
||||||
|
return NewWithFreeList(degree, NewFreeList(DefaultFreeListSize))
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewWithFreeList creates a new B-Tree that uses the given node free list.
|
||||||
|
func NewWithFreeList(degree int, f *FreeList) *BTree {
|
||||||
|
if degree <= 1 {
|
||||||
|
panic("bad degree")
|
||||||
|
}
|
||||||
|
return &BTree{
|
||||||
|
degree: degree,
|
||||||
|
freelist: f,
|
||||||
|
}
|
||||||
|
}
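As the FreeList comment above notes, several trees may recycle nodes from one list as long as they are not written concurrently. A sketch of sharing a list between two trees, using only NewFreeList, NewWithFreeList and DefaultFreeListSize as defined here:

package main

import "github.com/google/btree"

func main() {
    // One free list shared by two trees to reduce allocations. Safe only as
    // long as the two trees are not written to at the same time.
    fl := btree.NewFreeList(btree.DefaultFreeListSize)
    a := btree.NewWithFreeList(8, fl)
    b := btree.NewWithFreeList(8, fl)

    for i := 0; i < 1000; i++ {
        a.ReplaceOrInsert(btree.Int(i))
    }
    for i := 0; i < 1000; i++ {
        a.Delete(btree.Int(i))          // nodes freed by a return to fl...
        b.ReplaceOrInsert(btree.Int(i)) // ...and can be reused by b
    }
}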
|
||||||
|
|
||||||
|
// items stores items in a node.
|
||||||
|
type items []Item
|
||||||
|
|
||||||
|
// insertAt inserts a value into the given index, pushing all subsequent values
|
||||||
|
// forward.
|
||||||
|
func (s *items) insertAt(index int, item Item) {
|
||||||
|
*s = append(*s, nil)
|
||||||
|
if index < len(*s) {
|
||||||
|
copy((*s)[index+1:], (*s)[index:])
|
||||||
|
}
|
||||||
|
(*s)[index] = item
|
||||||
|
}
|
||||||
|
|
||||||
|
// removeAt removes a value at a given index, pulling all subsequent values
|
||||||
|
// back.
|
||||||
|
func (s *items) removeAt(index int) Item {
|
||||||
|
item := (*s)[index]
|
||||||
|
(*s)[index] = nil
|
||||||
|
copy((*s)[index:], (*s)[index+1:])
|
||||||
|
*s = (*s)[:len(*s)-1]
|
||||||
|
return item
|
||||||
|
}
|
||||||
|
|
||||||
|
// pop removes and returns the last element in the list.
|
||||||
|
func (s *items) pop() (out Item) {
|
||||||
|
index := len(*s) - 1
|
||||||
|
out = (*s)[index]
|
||||||
|
(*s)[index] = nil
|
||||||
|
*s = (*s)[:index]
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// find returns the index where the given item should be inserted into this
|
||||||
|
// list. 'found' is true if the item already exists in the list at the given
|
||||||
|
// index.
|
||||||
|
func (s items) find(item Item) (index int, found bool) {
|
||||||
|
i := sort.Search(len(s), func(i int) bool {
|
||||||
|
return item.Less(s[i])
|
||||||
|
})
|
||||||
|
if i > 0 && !s[i-1].Less(item) {
|
||||||
|
return i - 1, true
|
||||||
|
}
|
||||||
|
return i, false
|
||||||
|
}
|
||||||
|
|
||||||
|
// children stores child nodes in a node.
|
||||||
|
type children []*node
|
||||||
|
|
||||||
|
// insertAt inserts a value into the given index, pushing all subsequent values
|
||||||
|
// forward.
|
||||||
|
func (s *children) insertAt(index int, n *node) {
|
||||||
|
*s = append(*s, nil)
|
||||||
|
if index < len(*s) {
|
||||||
|
copy((*s)[index+1:], (*s)[index:])
|
||||||
|
}
|
||||||
|
(*s)[index] = n
|
||||||
|
}
|
||||||
|
|
||||||
|
// removeAt removes a value at a given index, pulling all subsequent values
|
||||||
|
// back.
|
||||||
|
func (s *children) removeAt(index int) *node {
|
||||||
|
n := (*s)[index]
|
||||||
|
(*s)[index] = nil
|
||||||
|
copy((*s)[index:], (*s)[index+1:])
|
||||||
|
*s = (*s)[:len(*s)-1]
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
// pop removes and returns the last element in the list.
|
||||||
|
func (s *children) pop() (out *node) {
|
||||||
|
index := len(*s) - 1
|
||||||
|
out = (*s)[index]
|
||||||
|
(*s)[index] = nil
|
||||||
|
*s = (*s)[:index]
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// node is an internal node in a tree.
|
||||||
|
//
|
||||||
|
// It must at all times maintain the invariant that either
|
||||||
|
// * len(children) == 0, len(items) unconstrained
|
||||||
|
// * len(children) == len(items) + 1
|
||||||
|
type node struct {
|
||||||
|
items items
|
||||||
|
children children
|
||||||
|
t *BTree
|
||||||
|
}
|
||||||
|
|
||||||
|
// split splits the given node at the given index. The current node shrinks,
|
||||||
|
// and this function returns the item that existed at that index and a new node
|
||||||
|
// containing all items/children after it.
|
||||||
|
func (n *node) split(i int) (Item, *node) {
|
||||||
|
item := n.items[i]
|
||||||
|
next := n.t.newNode()
|
||||||
|
next.items = append(next.items, n.items[i+1:]...)
|
||||||
|
n.items = n.items[:i]
|
||||||
|
if len(n.children) > 0 {
|
||||||
|
next.children = append(next.children, n.children[i+1:]...)
|
||||||
|
n.children = n.children[:i+1]
|
||||||
|
}
|
||||||
|
return item, next
|
||||||
|
}
|
||||||
|
|
||||||
|
// maybeSplitChild checks if a child should be split, and if so splits it.
|
||||||
|
// Returns whether or not a split occurred.
|
||||||
|
func (n *node) maybeSplitChild(i, maxItems int) bool {
|
||||||
|
if len(n.children[i].items) < maxItems {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
first := n.children[i]
|
||||||
|
item, second := first.split(maxItems / 2)
|
||||||
|
n.items.insertAt(i, item)
|
||||||
|
n.children.insertAt(i+1, second)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// insert inserts an item into the subtree rooted at this node, making sure
|
||||||
|
// no nodes in the subtree exceed maxItems items. Should an equivalent item be
|
||||||
|
// be found/replaced by insert, it will be returned.
|
||||||
|
func (n *node) insert(item Item, maxItems int) Item {
|
||||||
|
i, found := n.items.find(item)
|
||||||
|
if found {
|
||||||
|
out := n.items[i]
|
||||||
|
n.items[i] = item
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
if len(n.children) == 0 {
|
||||||
|
n.items.insertAt(i, item)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if n.maybeSplitChild(i, maxItems) {
|
||||||
|
inTree := n.items[i]
|
||||||
|
switch {
|
||||||
|
case item.Less(inTree):
|
||||||
|
// no change, we want first split node
|
||||||
|
case inTree.Less(item):
|
||||||
|
i++ // we want second split node
|
||||||
|
default:
|
||||||
|
out := n.items[i]
|
||||||
|
n.items[i] = item
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return n.children[i].insert(item, maxItems)
|
||||||
|
}
|
||||||
|
|
||||||
|
// get finds the given key in the subtree and returns it.
|
||||||
|
func (n *node) get(key Item) Item {
|
||||||
|
i, found := n.items.find(key)
|
||||||
|
if found {
|
||||||
|
return n.items[i]
|
||||||
|
} else if len(n.children) > 0 {
|
||||||
|
return n.children[i].get(key)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// min returns the first item in the subtree.
|
||||||
|
func min(n *node) Item {
|
||||||
|
if n == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
for len(n.children) > 0 {
|
||||||
|
n = n.children[0]
|
||||||
|
}
|
||||||
|
if len(n.items) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return n.items[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
// max returns the last item in the subtree.
|
||||||
|
func max(n *node) Item {
|
||||||
|
if n == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
for len(n.children) > 0 {
|
||||||
|
n = n.children[len(n.children)-1]
|
||||||
|
}
|
||||||
|
if len(n.items) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return n.items[len(n.items)-1]
|
||||||
|
}
|
||||||
|
|
||||||
|
// toRemove details what item to remove in a node.remove call.
|
||||||
|
type toRemove int
|
||||||
|
|
||||||
|
const (
|
||||||
|
removeItem toRemove = iota // removes the given item
|
||||||
|
removeMin // removes smallest item in the subtree
|
||||||
|
removeMax // removes largest item in the subtree
|
||||||
|
)
|
||||||
|
|
||||||
|
// remove removes an item from the subtree rooted at this node.
|
||||||
|
func (n *node) remove(item Item, minItems int, typ toRemove) Item {
|
||||||
|
var i int
|
||||||
|
var found bool
|
||||||
|
switch typ {
|
||||||
|
case removeMax:
|
||||||
|
if len(n.children) == 0 {
|
||||||
|
return n.items.pop()
|
||||||
|
}
|
||||||
|
i = len(n.items)
|
||||||
|
case removeMin:
|
||||||
|
if len(n.children) == 0 {
|
||||||
|
return n.items.removeAt(0)
|
||||||
|
}
|
||||||
|
i = 0
|
||||||
|
case removeItem:
|
||||||
|
i, found = n.items.find(item)
|
||||||
|
if len(n.children) == 0 {
|
||||||
|
if found {
|
||||||
|
return n.items.removeAt(i)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
panic("invalid type")
|
||||||
|
}
|
||||||
|
// If we get to here, we have children.
|
||||||
|
child := n.children[i]
|
||||||
|
if len(child.items) <= minItems {
|
||||||
|
return n.growChildAndRemove(i, item, minItems, typ)
|
||||||
|
}
|
||||||
|
// Either we had enough items to begin with, or we've done some
|
||||||
|
// merging/stealing, because we've got enough now and we're ready to return
|
||||||
|
// stuff.
|
||||||
|
if found {
|
||||||
|
// The item exists at index 'i', and the child we've selected can give us a
|
||||||
|
// predecessor, since if we've gotten here it's got > minItems items in it.
|
||||||
|
out := n.items[i]
|
||||||
|
// We use our special-case 'remove' call with typ=maxItem to pull the
|
||||||
|
// predecessor of item i (the rightmost leaf of our immediate left child)
|
||||||
|
// and set it into where we pulled the item from.
|
||||||
|
n.items[i] = child.remove(nil, minItems, removeMax)
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
// Final recursive call. Once we're here, we know that the item isn't in this
|
||||||
|
// node and that the child is big enough to remove from.
|
||||||
|
return child.remove(item, minItems, typ)
|
||||||
|
}
|
||||||
|
|
||||||
|
// growChildAndRemove grows child 'i' to make sure it's possible to remove an
|
||||||
|
// item from it while keeping it at minItems, then calls remove to actually
|
||||||
|
// remove it.
|
||||||
|
//
|
||||||
|
// Most documentation says we have to do two sets of special casing:
|
||||||
|
// 1) item is in this node
|
||||||
|
// 2) item is in child
|
||||||
|
// In both cases, we need to handle the two subcases:
|
||||||
|
// A) node has enough values that it can spare one
|
||||||
|
// B) node doesn't have enough values
|
||||||
|
// For the latter, we have to check:
|
||||||
|
// a) left sibling has node to spare
|
||||||
|
// b) right sibling has node to spare
|
||||||
|
// c) we must merge
|
||||||
|
// To simplify our code here, we handle cases #1 and #2 the same:
|
||||||
|
// If a node doesn't have enough items, we make sure it does (using a,b,c).
|
||||||
|
// We then simply redo our remove call, and the second time (regardless of
|
||||||
|
// whether we're in case 1 or 2), we'll have enough items and can guarantee
|
||||||
|
// that we hit case A.
|
||||||
|
func (n *node) growChildAndRemove(i int, item Item, minItems int, typ toRemove) Item {
|
||||||
|
child := n.children[i]
|
||||||
|
if i > 0 && len(n.children[i-1].items) > minItems {
|
||||||
|
// Steal from left child
|
||||||
|
stealFrom := n.children[i-1]
|
||||||
|
stolenItem := stealFrom.items.pop()
|
||||||
|
child.items.insertAt(0, n.items[i-1])
|
||||||
|
n.items[i-1] = stolenItem
|
||||||
|
if len(stealFrom.children) > 0 {
|
||||||
|
child.children.insertAt(0, stealFrom.children.pop())
|
||||||
|
}
|
||||||
|
} else if i < len(n.items) && len(n.children[i+1].items) > minItems {
|
||||||
|
// steal from right child
|
||||||
|
stealFrom := n.children[i+1]
|
||||||
|
stolenItem := stealFrom.items.removeAt(0)
|
||||||
|
child.items = append(child.items, n.items[i])
|
||||||
|
n.items[i] = stolenItem
|
||||||
|
if len(stealFrom.children) > 0 {
|
||||||
|
child.children = append(child.children, stealFrom.children.removeAt(0))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if i >= len(n.items) {
|
||||||
|
i--
|
||||||
|
child = n.children[i]
|
||||||
|
}
|
||||||
|
// merge with right child
|
||||||
|
mergeItem := n.items.removeAt(i)
|
||||||
|
mergeChild := n.children.removeAt(i + 1)
|
||||||
|
child.items = append(child.items, mergeItem)
|
||||||
|
child.items = append(child.items, mergeChild.items...)
|
||||||
|
child.children = append(child.children, mergeChild.children...)
|
||||||
|
n.t.freeNode(mergeChild)
|
||||||
|
}
|
||||||
|
return n.remove(item, minItems, typ)
|
||||||
|
}
|
||||||
|
|
||||||
|
// iterate provides a simple method for iterating over elements in the tree.
|
||||||
|
// It could probably use some work to be extra-efficient (it calls from() a
|
||||||
|
// little more than it should), but it works pretty well for now.
|
||||||
|
//
|
||||||
|
// It requires that 'from' and 'to' both return true for values we should hit
|
||||||
|
// with the iterator. It should also be the case that 'from' returns true for
|
||||||
|
// values less than or equal to values 'to' returns true for, and 'to'
|
||||||
|
// returns true for values greater than or equal to those that 'from'
|
||||||
|
// does.
|
||||||
|
func (n *node) iterate(from, to func(Item) bool, iter ItemIterator) bool {
|
||||||
|
for i, item := range n.items {
|
||||||
|
if !from(item) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if len(n.children) > 0 && !n.children[i].iterate(from, to, iter) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if !to(item) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if !iter(item) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(n.children) > 0 {
|
||||||
|
return n.children[len(n.children)-1].iterate(from, to, iter)
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Used for testing/debugging purposes.
|
||||||
|
func (n *node) print(w io.Writer, level int) {
|
||||||
|
fmt.Fprintf(w, "%sNODE:%v\n", strings.Repeat(" ", level), n.items)
|
||||||
|
for _, c := range n.children {
|
||||||
|
c.print(w, level+1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// BTree is an implementation of a B-Tree.
|
||||||
|
//
|
||||||
|
// BTree stores Item instances in an ordered structure, allowing easy insertion,
|
||||||
|
// removal, and iteration.
|
||||||
|
//
|
||||||
|
// Write operations are not safe for concurrent mutation by multiple
|
||||||
|
// goroutines, but Read operations are.
|
||||||
|
type BTree struct {
|
||||||
|
degree int
|
||||||
|
length int
|
||||||
|
root *node
|
||||||
|
freelist *FreeList
|
||||||
|
}
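Since only reads are safe for concurrent use (per the comment above), callers that mutate a shared tree from several goroutines need their own locking. One conventional way is to wrap the tree with a sync.RWMutex; this wrapper is illustrative, not part of the package:

package main

import (
    "sync"

    "github.com/google/btree"
)

// lockedBTree serializes writers while still allowing concurrent readers,
// matching the package's documented guarantees.
type lockedBTree struct {
    mu sync.RWMutex
    tr *btree.BTree
}

func (l *lockedBTree) ReplaceOrInsert(item btree.Item) btree.Item {
    l.mu.Lock()
    defer l.mu.Unlock()
    return l.tr.ReplaceOrInsert(item)
}

func (l *lockedBTree) Get(key btree.Item) btree.Item {
    l.mu.RLock()
    defer l.mu.RUnlock()
    return l.tr.Get(key)
}

func main() {
    l := &lockedBTree{tr: btree.New(16)}
    l.ReplaceOrInsert(btree.Int(42))
    _ = l.Get(btree.Int(42))
}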
|
||||||
|
|
||||||
|
// maxItems returns the max number of items to allow per node.
|
||||||
|
func (t *BTree) maxItems() int {
|
||||||
|
return t.degree*2 - 1
|
||||||
|
}
|
||||||
|
|
||||||
|
// minItems returns the min number of items to allow per node (ignored for the
|
||||||
|
// root node).
|
||||||
|
func (t *BTree) minItems() int {
|
||||||
|
return t.degree - 1
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *BTree) newNode() (n *node) {
|
||||||
|
n = t.freelist.newNode()
|
||||||
|
n.t = t
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *BTree) freeNode(n *node) {
|
||||||
|
for i := range n.items {
|
||||||
|
n.items[i] = nil // clear to allow GC
|
||||||
|
}
|
||||||
|
n.items = n.items[:0]
|
||||||
|
for i := range n.children {
|
||||||
|
n.children[i] = nil // clear to allow GC
|
||||||
|
}
|
||||||
|
n.children = n.children[:0]
|
||||||
|
n.t = nil // clear to allow GC
|
||||||
|
t.freelist.freeNode(n)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReplaceOrInsert adds the given item to the tree. If an item in the tree
|
||||||
|
// already equals the given one, it is removed from the tree and returned.
|
||||||
|
// Otherwise, nil is returned.
|
||||||
|
//
|
||||||
|
// nil cannot be added to the tree (will panic).
|
||||||
|
func (t *BTree) ReplaceOrInsert(item Item) Item {
|
||||||
|
if item == nil {
|
||||||
|
panic("nil item being added to BTree")
|
||||||
|
}
|
||||||
|
if t.root == nil {
|
||||||
|
t.root = t.newNode()
|
||||||
|
t.root.items = append(t.root.items, item)
|
||||||
|
t.length++
|
||||||
|
return nil
|
||||||
|
} else if len(t.root.items) >= t.maxItems() {
|
||||||
|
item2, second := t.root.split(t.maxItems() / 2)
|
||||||
|
oldroot := t.root
|
||||||
|
t.root = t.newNode()
|
||||||
|
t.root.items = append(t.root.items, item2)
|
||||||
|
t.root.children = append(t.root.children, oldroot, second)
|
||||||
|
}
|
||||||
|
out := t.root.insert(item, t.maxItems())
|
||||||
|
if out == nil {
|
||||||
|
t.length++
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete removes an item equal to the passed in item from the tree, returning
|
||||||
|
// it. If no such item exists, returns nil.
|
||||||
|
func (t *BTree) Delete(item Item) Item {
|
||||||
|
return t.deleteItem(item, removeItem)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteMin removes the smallest item in the tree and returns it.
|
||||||
|
// If no such item exists, returns nil.
|
||||||
|
func (t *BTree) DeleteMin() Item {
|
||||||
|
return t.deleteItem(nil, removeMin)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteMax removes the largest item in the tree and returns it.
|
||||||
|
// If no such item exists, returns nil.
|
||||||
|
func (t *BTree) DeleteMax() Item {
|
||||||
|
return t.deleteItem(nil, removeMax)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *BTree) deleteItem(item Item, typ toRemove) Item {
|
||||||
|
if t.root == nil || len(t.root.items) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
out := t.root.remove(item, t.minItems(), typ)
|
||||||
|
if len(t.root.items) == 0 && len(t.root.children) > 0 {
|
||||||
|
oldroot := t.root
|
||||||
|
t.root = t.root.children[0]
|
||||||
|
t.freeNode(oldroot)
|
||||||
|
}
|
||||||
|
if out != nil {
|
||||||
|
t.length--
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
// AscendRange calls the iterator for every value in the tree within the range
|
||||||
|
// [greaterOrEqual, lessThan), until iterator returns false.
|
||||||
|
func (t *BTree) AscendRange(greaterOrEqual, lessThan Item, iterator ItemIterator) {
|
||||||
|
if t.root == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
t.root.iterate(
|
||||||
|
func(a Item) bool { return !a.Less(greaterOrEqual) },
|
||||||
|
func(a Item) bool { return a.Less(lessThan) },
|
||||||
|
iterator)
|
||||||
|
}
|
||||||
|
|
||||||
|
// AscendLessThan calls the iterator for every value in the tree within the range
|
||||||
|
// [first, pivot), until iterator returns false.
|
||||||
|
func (t *BTree) AscendLessThan(pivot Item, iterator ItemIterator) {
|
||||||
|
if t.root == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
t.root.iterate(
|
||||||
|
func(a Item) bool { return true },
|
||||||
|
func(a Item) bool { return a.Less(pivot) },
|
||||||
|
iterator)
|
||||||
|
}
|
||||||
|
|
||||||
|
// AscendGreaterOrEqual calls the iterator for every value in the tree within
|
||||||
|
// the range [pivot, last], until iterator returns false.
|
||||||
|
func (t *BTree) AscendGreaterOrEqual(pivot Item, iterator ItemIterator) {
|
||||||
|
if t.root == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
t.root.iterate(
|
||||||
|
func(a Item) bool { return !a.Less(pivot) },
|
||||||
|
func(a Item) bool { return true },
|
||||||
|
iterator)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ascend calls the iterator for every value in the tree within the range
|
||||||
|
// [first, last], until iterator returns false.
|
||||||
|
func (t *BTree) Ascend(iterator ItemIterator) {
|
||||||
|
if t.root == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
t.root.iterate(
|
||||||
|
func(a Item) bool { return true },
|
||||||
|
func(a Item) bool { return true },
|
||||||
|
iterator)
|
||||||
|
}
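Because every Ascend* variant stops as soon as the iterator returns false, a "first item at or after pivot" lookup is just a one-step iteration. A sketch built only on AscendGreaterOrEqual as defined above:

package main

import (
    "fmt"

    "github.com/google/btree"
)

// ceiling returns the smallest item >= pivot, or nil if there is none.
func ceiling(tr *btree.BTree, pivot btree.Item) btree.Item {
    var found btree.Item
    tr.AscendGreaterOrEqual(pivot, func(i btree.Item) bool {
        found = i
        return false // stop after the first hit
    })
    return found
}

func main() {
    tr := btree.New(4)
    for _, v := range []int{2, 4, 8, 16} {
        tr.ReplaceOrInsert(btree.Int(v))
    }
    fmt.Println(ceiling(tr, btree.Int(5))) // 8
}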
|
||||||
|
|
||||||
|
// Get looks for the key item in the tree, returning it. It returns nil if
|
||||||
|
// unable to find that item.
|
||||||
|
func (t *BTree) Get(key Item) Item {
|
||||||
|
if t.root == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return t.root.get(key)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Min returns the smallest item in the tree, or nil if the tree is empty.
|
||||||
|
func (t *BTree) Min() Item {
|
||||||
|
return min(t.root)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Max returns the largest item in the tree, or nil if the tree is empty.
|
||||||
|
func (t *BTree) Max() Item {
|
||||||
|
return max(t.root)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Has returns true if the given key is in the tree.
|
||||||
|
func (t *BTree) Has(key Item) bool {
|
||||||
|
return t.Get(key) != nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Len returns the number of items currently in the tree.
|
||||||
|
func (t *BTree) Len() int {
|
||||||
|
return t.length
|
||||||
|
}
|
||||||
|
|
||||||
|
// Int implements the Item interface for integers.
|
||||||
|
type Int int
|
||||||
|
|
||||||
|
// Less returns true if int(a) < int(b).
|
||||||
|
func (a Int) Less(b Item) bool {
|
||||||
|
return a < b.(Int)
|
||||||
|
}
8728 vendor/github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.go generated vendored Normal file
File diff suppressed because it is too large
4456 vendor/github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.pb.go generated vendored Normal file
File diff suppressed because it is too large
662 vendor/github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.proto generated vendored Normal file
@@ -0,0 +1,662 @@
|
||||||
|
// Copyright 2017 Google Inc. All Rights Reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
// THIS FILE IS AUTOMATICALLY GENERATED.
|
||||||
|
|
||||||
|
syntax = "proto3";
|
||||||
|
|
||||||
|
package openapi.v2;
|
||||||
|
|
||||||
|
import "google/protobuf/any.proto";
|
||||||
|
|
||||||
|
// This option lets the proto compiler generate Java code inside the package
|
||||||
|
// name (see below) instead of inside an outer class. It creates a simpler
|
||||||
|
// developer experience by reducing one-level of name nesting and be
|
||||||
|
// consistent with most programming languages that don't support outer classes.
|
||||||
|
option java_multiple_files = true;
|
||||||
|
|
||||||
|
// The Java outer classname should be the filename in UpperCamelCase. This
|
||||||
|
// class is only used to hold proto descriptor, so developers don't need to
|
||||||
|
// work with it directly.
|
||||||
|
option java_outer_classname = "OpenAPIProto";
|
||||||
|
|
||||||
|
// The Java package name must be proto package name with proper prefix.
|
||||||
|
option java_package = "org.openapi_v2";
|
||||||
|
|
||||||
|
// A reasonable prefix for the Objective-C symbols generated from the package.
|
||||||
|
// It should at a minimum be 3 characters long, all uppercase, and convention
|
||||||
|
// is to use an abbreviation of the package name. Something short, but
|
||||||
|
// hopefully unique enough to not conflict with things that may come along in
|
||||||
|
// the future. 'GPB' is reserved for the protocol buffer implementation itself.
|
||||||
|
option objc_class_prefix = "OAS";
|
||||||
|
|
||||||
|
message AdditionalPropertiesItem {
|
||||||
|
oneof oneof {
|
||||||
|
Schema schema = 1;
|
||||||
|
bool boolean = 2;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
message Any {
|
||||||
|
google.protobuf.Any value = 1;
|
||||||
|
string yaml = 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
message ApiKeySecurity {
|
||||||
|
string type = 1;
|
||||||
|
string name = 2;
|
||||||
|
string in = 3;
|
||||||
|
string description = 4;
|
||||||
|
repeated NamedAny vendor_extension = 5;
|
||||||
|
}
|
||||||
|
|
||||||
|
message BasicAuthenticationSecurity {
|
||||||
|
string type = 1;
|
||||||
|
string description = 2;
|
||||||
|
repeated NamedAny vendor_extension = 3;
|
||||||
|
}
|
||||||
|
|
||||||
|
message BodyParameter {
|
||||||
|
// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed.
|
||||||
|
string description = 1;
|
||||||
|
// The name of the parameter.
|
||||||
|
string name = 2;
|
||||||
|
// Determines the location of the parameter.
|
||||||
|
string in = 3;
|
||||||
|
// Determines whether or not this parameter is required or optional.
|
||||||
|
bool required = 4;
|
||||||
|
Schema schema = 5;
|
||||||
|
repeated NamedAny vendor_extension = 6;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Contact information for the owners of the API.
|
||||||
|
message Contact {
|
||||||
|
// The identifying name of the contact person/organization.
|
||||||
|
string name = 1;
|
||||||
|
// The URL pointing to the contact information.
|
||||||
|
string url = 2;
|
||||||
|
// The email address of the contact person/organization.
|
||||||
|
string email = 3;
|
||||||
|
repeated NamedAny vendor_extension = 4;
|
||||||
|
}
|
||||||
|
|
||||||
|
message Default {
|
||||||
|
repeated NamedAny additional_properties = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// One or more JSON objects describing the schemas being consumed and produced by the API.
|
||||||
|
message Definitions {
|
||||||
|
repeated NamedSchema additional_properties = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
message Document {
|
||||||
|
// The Swagger version of this document.
|
||||||
|
string swagger = 1;
|
||||||
|
Info info = 2;
|
||||||
|
// The host (name or ip) of the API. Example: 'swagger.io'
|
||||||
|
string host = 3;
|
||||||
|
// The base path to the API. Example: '/api'.
|
||||||
|
string base_path = 4;
|
||||||
|
// The transfer protocol of the API.
|
||||||
|
repeated string schemes = 5;
|
||||||
|
// A list of MIME types accepted by the API.
|
||||||
|
repeated string consumes = 6;
|
||||||
|
// A list of MIME types the API can produce.
|
||||||
|
repeated string produces = 7;
|
||||||
|
Paths paths = 8;
|
||||||
|
Definitions definitions = 9;
|
||||||
|
ParameterDefinitions parameters = 10;
|
||||||
|
ResponseDefinitions responses = 11;
|
||||||
|
repeated SecurityRequirement security = 12;
|
||||||
|
SecurityDefinitions security_definitions = 13;
|
||||||
|
repeated Tag tags = 14;
|
||||||
|
ExternalDocs external_docs = 15;
|
||||||
|
repeated NamedAny vendor_extension = 16;
|
||||||
|
}
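Document is the root message of this schema; the smallest useful instance carries a swagger version, an info block with title and version, and a paths container. A hedged Go sketch of building one in memory, assuming the generated package vendored alongside this proto; the import path and the CamelCase field and getter names follow the usual protoc-gen-go mapping and are not spelled out in this diff, so treat those identifiers as assumptions:

package main

import (
    "fmt"

    openapi_v2 "github.com/googleapis/gnostic/OpenAPIv2"
)

func main() {
    // Minimal well-formed Document: version, Info, and an (empty) Paths.
    doc := &openapi_v2.Document{
        Swagger:  "2.0",
        Info:     &openapi_v2.Info{Title: "example", Version: "1.0.0"},
        Host:     "api.example.com",
        BasePath: "/api",
        Schemes:  []string{"https"},
        Paths:    &openapi_v2.Paths{},
    }
    fmt.Println(doc.GetInfo().GetTitle())
}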
|
||||||
|
|
||||||
|
message Examples {
|
||||||
|
repeated NamedAny additional_properties = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// information about external documentation
|
||||||
|
message ExternalDocs {
|
||||||
|
string description = 1;
|
||||||
|
string url = 2;
|
||||||
|
repeated NamedAny vendor_extension = 3;
|
||||||
|
}
|
||||||
|
|
||||||
|
// A deterministic version of a JSON Schema object.
|
||||||
|
message FileSchema {
|
||||||
|
string format = 1;
|
||||||
|
string title = 2;
|
||||||
|
string description = 3;
|
||||||
|
Any default = 4;
|
||||||
|
repeated string required = 5;
|
||||||
|
string type = 6;
|
||||||
|
bool read_only = 7;
|
||||||
|
ExternalDocs external_docs = 8;
|
||||||
|
Any example = 9;
|
||||||
|
repeated NamedAny vendor_extension = 10;
|
||||||
|
}
|
||||||
|
|
||||||
|
message FormDataParameterSubSchema {
|
||||||
|
// Determines whether or not this parameter is required or optional.
|
||||||
|
bool required = 1;
|
||||||
|
// Determines the location of the parameter.
|
||||||
|
string in = 2;
|
||||||
|
// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed.
|
||||||
|
string description = 3;
|
||||||
|
// The name of the parameter.
|
||||||
|
string name = 4;
|
||||||
|
// allows sending a parameter by name only or with an empty value.
|
||||||
|
bool allow_empty_value = 5;
|
||||||
|
string type = 6;
|
||||||
|
string format = 7;
|
||||||
|
PrimitivesItems items = 8;
|
||||||
|
string collection_format = 9;
|
||||||
|
Any default = 10;
|
||||||
|
double maximum = 11;
|
||||||
|
bool exclusive_maximum = 12;
|
||||||
|
double minimum = 13;
|
||||||
|
bool exclusive_minimum = 14;
|
||||||
|
int64 max_length = 15;
|
||||||
|
int64 min_length = 16;
|
||||||
|
string pattern = 17;
|
||||||
|
int64 max_items = 18;
|
||||||
|
int64 min_items = 19;
|
||||||
|
bool unique_items = 20;
|
||||||
|
repeated Any enum = 21;
|
||||||
|
double multiple_of = 22;
|
||||||
|
repeated NamedAny vendor_extension = 23;
|
||||||
|
}
|
||||||
|
|
||||||
|
message Header {
|
||||||
|
string type = 1;
|
||||||
|
string format = 2;
|
||||||
|
PrimitivesItems items = 3;
|
||||||
|
string collection_format = 4;
|
||||||
|
Any default = 5;
|
||||||
|
double maximum = 6;
|
||||||
|
bool exclusive_maximum = 7;
|
||||||
|
double minimum = 8;
|
||||||
|
bool exclusive_minimum = 9;
|
||||||
|
int64 max_length = 10;
|
||||||
|
int64 min_length = 11;
|
||||||
|
string pattern = 12;
|
||||||
|
int64 max_items = 13;
|
||||||
|
int64 min_items = 14;
|
||||||
|
bool unique_items = 15;
|
||||||
|
repeated Any enum = 16;
|
||||||
|
double multiple_of = 17;
|
||||||
|
string description = 18;
|
||||||
|
repeated NamedAny vendor_extension = 19;
|
||||||
|
}
|
||||||
|
|
||||||
|
message HeaderParameterSubSchema {
|
||||||
|
// Determines whether or not this parameter is required or optional.
|
||||||
|
bool required = 1;
|
||||||
|
// Determines the location of the parameter.
|
||||||
|
string in = 2;
|
||||||
|
// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed.
|
||||||
|
string description = 3;
|
||||||
|
// The name of the parameter.
|
||||||
|
string name = 4;
|
||||||
|
string type = 5;
|
||||||
|
string format = 6;
|
||||||
|
PrimitivesItems items = 7;
|
||||||
|
string collection_format = 8;
|
||||||
|
Any default = 9;
|
||||||
|
double maximum = 10;
|
||||||
|
bool exclusive_maximum = 11;
|
||||||
|
double minimum = 12;
|
||||||
|
bool exclusive_minimum = 13;
|
||||||
|
int64 max_length = 14;
|
||||||
|
int64 min_length = 15;
|
||||||
|
string pattern = 16;
|
||||||
|
int64 max_items = 17;
|
||||||
|
int64 min_items = 18;
|
||||||
|
bool unique_items = 19;
|
||||||
|
repeated Any enum = 20;
|
||||||
|
double multiple_of = 21;
|
||||||
|
repeated NamedAny vendor_extension = 22;
|
||||||
|
}
|
||||||
|
|
||||||
|
message Headers {
|
||||||
|
repeated NamedHeader additional_properties = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// General information about the API.
|
||||||
|
message Info {
|
||||||
|
// A unique and precise title of the API.
|
||||||
|
string title = 1;
|
||||||
|
// A semantic version number of the API.
|
||||||
|
string version = 2;
|
||||||
|
// A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed.
|
||||||
|
string description = 3;
|
||||||
|
// The terms of service for the API.
|
||||||
|
string terms_of_service = 4;
|
||||||
|
Contact contact = 5;
|
||||||
|
License license = 6;
|
||||||
|
repeated NamedAny vendor_extension = 7;
|
||||||
|
}
|
||||||
|
|
||||||
|
message ItemsItem {
|
||||||
|
repeated Schema schema = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
message JsonReference {
|
||||||
|
string _ref = 1;
|
||||||
|
string description = 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
message License {
|
||||||
|
// The name of the license type. It's encouraged to use an OSI compatible license.
|
||||||
|
string name = 1;
|
||||||
|
// The URL pointing to the license.
|
||||||
|
string url = 2;
|
||||||
|
repeated NamedAny vendor_extension = 3;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Automatically-generated message used to represent maps of Any as ordered (name,value) pairs.
|
||||||
|
message NamedAny {
|
||||||
|
// Map key
|
||||||
|
string name = 1;
|
||||||
|
// Mapped value
|
||||||
|
Any value = 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Automatically-generated message used to represent maps of Header as ordered (name,value) pairs.
|
||||||
|
message NamedHeader {
|
||||||
|
// Map key
|
||||||
|
string name = 1;
|
||||||
|
// Mapped value
|
||||||
|
Header value = 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Automatically-generated message used to represent maps of Parameter as ordered (name,value) pairs.
|
||||||
|
message NamedParameter {
|
||||||
|
// Map key
|
||||||
|
string name = 1;
|
||||||
|
// Mapped value
|
||||||
|
Parameter value = 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Automatically-generated message used to represent maps of PathItem as ordered (name,value) pairs.
|
||||||
|
message NamedPathItem {
|
||||||
|
// Map key
|
||||||
|
string name = 1;
|
||||||
|
// Mapped value
|
||||||
|
PathItem value = 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Automatically-generated message used to represent maps of Response as ordered (name,value) pairs.
|
||||||
|
message NamedResponse {
|
||||||
|
// Map key
|
||||||
|
string name = 1;
|
||||||
|
// Mapped value
|
||||||
|
Response value = 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Automatically-generated message used to represent maps of ResponseValue as ordered (name,value) pairs.
|
||||||
|
message NamedResponseValue {
|
||||||
|
// Map key
|
||||||
|
string name = 1;
|
||||||
|
// Mapped value
|
||||||
|
ResponseValue value = 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Automatically-generated message used to represent maps of Schema as ordered (name,value) pairs.
|
||||||
|
message NamedSchema {
|
||||||
|
// Map key
|
||||||
|
string name = 1;
|
||||||
|
// Mapped value
|
||||||
|
Schema value = 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Automatically-generated message used to represent maps of SecurityDefinitionsItem as ordered (name,value) pairs.
|
||||||
|
message NamedSecurityDefinitionsItem {
|
||||||
|
// Map key
|
||||||
|
string name = 1;
|
||||||
|
// Mapped value
|
||||||
|
SecurityDefinitionsItem value = 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Automatically-generated message used to represent maps of string as ordered (name,value) pairs.
|
||||||
|
message NamedString {
|
||||||
|
// Map key
|
||||||
|
string name = 1;
|
||||||
|
// Mapped value
|
||||||
|
string value = 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Automatically-generated message used to represent maps of StringArray as ordered (name,value) pairs.
|
||||||
|
message NamedStringArray {
|
||||||
|
// Map key
|
||||||
|
string name = 1;
|
||||||
|
// Mapped value
|
||||||
|
StringArray value = 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
message NonBodyParameter {
|
||||||
|
oneof oneof {
|
||||||
|
HeaderParameterSubSchema header_parameter_sub_schema = 1;
|
||||||
|
FormDataParameterSubSchema form_data_parameter_sub_schema = 2;
|
||||||
|
QueryParameterSubSchema query_parameter_sub_schema = 3;
|
||||||
|
PathParameterSubSchema path_parameter_sub_schema = 4;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
message Oauth2AccessCodeSecurity {
|
||||||
|
string type = 1;
|
||||||
|
string flow = 2;
|
||||||
|
Oauth2Scopes scopes = 3;
|
||||||
|
string authorization_url = 4;
|
||||||
|
string token_url = 5;
|
||||||
|
string description = 6;
|
||||||
|
repeated NamedAny vendor_extension = 7;
|
||||||
|
}
|
||||||
|
|
||||||
|
message Oauth2ApplicationSecurity {
|
||||||
|
string type = 1;
|
||||||
|
string flow = 2;
|
||||||
|
Oauth2Scopes scopes = 3;
|
||||||
|
string token_url = 4;
|
||||||
|
string description = 5;
|
||||||
|
repeated NamedAny vendor_extension = 6;
|
||||||
|
}
|
||||||
|
|
||||||
|
message Oauth2ImplicitSecurity {
|
||||||
|
string type = 1;
|
||||||
|
string flow = 2;
|
||||||
|
Oauth2Scopes scopes = 3;
|
||||||
|
string authorization_url = 4;
|
||||||
|
string description = 5;
|
||||||
|
repeated NamedAny vendor_extension = 6;
|
||||||
|
}
|
||||||
|
|
||||||
|
message Oauth2PasswordSecurity {
|
||||||
|
string type = 1;
|
||||||
|
string flow = 2;
|
||||||
|
Oauth2Scopes scopes = 3;
|
||||||
|
string token_url = 4;
|
||||||
|
string description = 5;
|
||||||
|
repeated NamedAny vendor_extension = 6;
|
||||||
|
}
|
||||||
|
|
||||||
|
message Oauth2Scopes {
|
||||||
|
repeated NamedString additional_properties = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
message Operation {
|
||||||
|
repeated string tags = 1;
|
||||||
|
// A brief summary of the operation.
|
||||||
|
string summary = 2;
|
||||||
|
// A longer description of the operation, GitHub Flavored Markdown is allowed.
|
||||||
|
string description = 3;
|
||||||
|
ExternalDocs external_docs = 4;
|
||||||
|
// A unique identifier of the operation.
|
||||||
|
string operation_id = 5;
|
||||||
|
// A list of MIME types the API can produce.
|
||||||
|
repeated string produces = 6;
|
||||||
|
// A list of MIME types the API can consume.
|
||||||
|
repeated string consumes = 7;
|
||||||
|
// The parameters needed to send a valid API call.
|
||||||
|
repeated ParametersItem parameters = 8;
|
||||||
|
Responses responses = 9;
|
||||||
|
// The transfer protocol of the API.
|
||||||
|
repeated string schemes = 10;
|
||||||
|
bool deprecated = 11;
|
||||||
|
repeated SecurityRequirement security = 12;
|
||||||
|
repeated NamedAny vendor_extension = 13;
|
||||||
|
}
|
||||||
|
|
||||||
|
message Parameter {
|
||||||
|
oneof oneof {
|
||||||
|
BodyParameter body_parameter = 1;
|
||||||
|
NonBodyParameter non_body_parameter = 2;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// One or more JSON representations for parameters
|
||||||
|
message ParameterDefinitions {
|
||||||
|
repeated NamedParameter additional_properties = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
message ParametersItem {
|
||||||
|
oneof oneof {
|
||||||
|
Parameter parameter = 1;
|
||||||
|
JsonReference json_reference = 2;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
message PathItem {
  string _ref = 1;
  Operation get = 2;
  Operation put = 3;
  Operation post = 4;
  Operation delete = 5;
  Operation options = 6;
  Operation head = 7;
  Operation patch = 8;

  // The parameters needed to send a valid API call.
  repeated ParametersItem parameters = 9;

  repeated NamedAny vendor_extension = 10;
}

message PathParameterSubSchema {
  // Determines whether or not this parameter is required or optional.
  bool required = 1;

  // Determines the location of the parameter.
  string in = 2;

  // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed.
  string description = 3;

  // The name of the parameter.
  string name = 4;

  string type = 5;
  string format = 6;
  PrimitivesItems items = 7;
  string collection_format = 8;
  Any default = 9;
  double maximum = 10;
  bool exclusive_maximum = 11;
  double minimum = 12;
  bool exclusive_minimum = 13;
  int64 max_length = 14;
  int64 min_length = 15;
  string pattern = 16;
  int64 max_items = 17;
  int64 min_items = 18;
  bool unique_items = 19;
  repeated Any enum = 20;
  double multiple_of = 21;
  repeated NamedAny vendor_extension = 22;
}

// Relative paths to the individual endpoints. They must be relative to the 'basePath'.
message Paths {
  repeated NamedAny vendor_extension = 1;
  repeated NamedPathItem path = 2;
}
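
Paths stores its entries as a repeated NamedPathItem rather than a map, which keeps the path order of the source document. A rough sketch of building one entry, assuming NamedPathItem is the usual name/value pair (defined elsewhere in this file) and the same generated package as above:

```go
package example

import (
	openapi_v2 "github.com/googleapis/gnostic/OpenAPIv2" // assumed vendored import path
)

// buildPaths pairs the "/pets" path template with a PathItem. The repeated
// NamedPathItem field plays the role of a JSON map while preserving order.
func buildPaths(listPets *openapi_v2.Operation) *openapi_v2.Paths {
	return &openapi_v2.Paths{
		Path: []*openapi_v2.NamedPathItem{
			{Name: "/pets", Value: &openapi_v2.PathItem{Get: listPets}}, // pair field names assumed
		},
	}
}
```
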
message PrimitivesItems {
  string type = 1;
  string format = 2;
  PrimitivesItems items = 3;
  string collection_format = 4;
  Any default = 5;
  double maximum = 6;
  bool exclusive_maximum = 7;
  double minimum = 8;
  bool exclusive_minimum = 9;
  int64 max_length = 10;
  int64 min_length = 11;
  string pattern = 12;
  int64 max_items = 13;
  int64 min_items = 14;
  bool unique_items = 15;
  repeated Any enum = 16;
  double multiple_of = 17;
  repeated NamedAny vendor_extension = 18;
}

message Properties {
  repeated NamedSchema additional_properties = 1;
}

message QueryParameterSubSchema {
  // Determines whether or not this parameter is required or optional.
  bool required = 1;

  // Determines the location of the parameter.
  string in = 2;

  // A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed.
  string description = 3;

  // The name of the parameter.
  string name = 4;

  // allows sending a parameter by name only or with an empty value.
  bool allow_empty_value = 5;

  string type = 6;
  string format = 7;
  PrimitivesItems items = 8;
  string collection_format = 9;
  Any default = 10;
  double maximum = 11;
  bool exclusive_maximum = 12;
  double minimum = 13;
  bool exclusive_minimum = 14;
  int64 max_length = 15;
  int64 min_length = 16;
  string pattern = 17;
  int64 max_items = 18;
  int64 min_items = 19;
  bool unique_items = 20;
  repeated Any enum = 21;
  double multiple_of = 22;
  repeated NamedAny vendor_extension = 23;
}

message Response {
  string description = 1;
  SchemaItem schema = 2;
  Headers headers = 3;
  Examples examples = 4;
  repeated NamedAny vendor_extension = 5;
}

// One or more JSON representations for parameters
message ResponseDefinitions {
  repeated NamedResponse additional_properties = 1;
}

message ResponseValue {
  oneof oneof {
    Response response = 1;
    JsonReference json_reference = 2;
  }
}

// Response objects names can either be any valid HTTP status code or 'default'.
message Responses {
  repeated NamedResponseValue response_code = 1;
  repeated NamedAny vendor_extension = 2;
}
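
Responses follows the same pattern: each status code is a NamedResponseValue whose value is a ResponseValue oneof, selecting either an inline Response or a JsonReference. A hedged sketch, with the pair-type field names and oneof wrapper names assumed:

```go
package example

import (
	openapi_v2 "github.com/googleapis/gnostic/OpenAPIv2" // assumed vendored import path
)

// okResponse builds a Responses message with a single "200" entry. The status
// code travels in a NamedResponseValue (assumed name/value pair), and the
// ResponseValue oneof selects an inline Response rather than a $ref.
func okResponse(description string) *openapi_v2.Responses {
	return &openapi_v2.Responses{
		ResponseCode: []*openapi_v2.NamedResponseValue{
			{
				Name: "200",
				Value: &openapi_v2.ResponseValue{
					Oneof: &openapi_v2.ResponseValue_Response{
						Response: &openapi_v2.Response{Description: description},
					},
				},
			},
		},
	}
}
```
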
// A deterministic version of a JSON Schema object.
message Schema {
  string _ref = 1;
  string format = 2;
  string title = 3;
  string description = 4;
  Any default = 5;
  double multiple_of = 6;
  double maximum = 7;
  bool exclusive_maximum = 8;
  double minimum = 9;
  bool exclusive_minimum = 10;
  int64 max_length = 11;
  int64 min_length = 12;
  string pattern = 13;
  int64 max_items = 14;
  int64 min_items = 15;
  bool unique_items = 16;
  int64 max_properties = 17;
  int64 min_properties = 18;
  repeated string required = 19;
  repeated Any enum = 20;
  AdditionalPropertiesItem additional_properties = 21;
  TypeItem type = 22;
  ItemsItem items = 23;
  repeated Schema all_of = 24;
  Properties properties = 25;
  string discriminator = 26;
  bool read_only = 27;
  Xml xml = 28;
  ExternalDocs external_docs = 29;
  Any example = 30;
  repeated NamedAny vendor_extension = 31;
}

message SchemaItem {
  oneof oneof {
    Schema schema = 1;
    FileSchema file_schema = 2;
  }
}
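
A sketch of a small Schema, again assuming the generated Go names: the `type` keyword is carried by TypeItem (a list of strings), and `properties` is yet another repeated name/value list (NamedSchema, defined elsewhere in this file) rather than a Go map.

```go
package example

import (
	openapi_v2 "github.com/googleapis/gnostic/OpenAPIv2" // assumed vendored import path
)

// petSchema sketches an object schema with one required string property.
func petSchema() *openapi_v2.Schema {
	nameSchema := &openapi_v2.Schema{Type: &openapi_v2.TypeItem{Value: []string{"string"}}}
	return &openapi_v2.Schema{
		Type:     &openapi_v2.TypeItem{Value: []string{"object"}},
		Required: []string{"name"},
		Properties: &openapi_v2.Properties{
			AdditionalProperties: []*openapi_v2.NamedSchema{
				{Name: "name", Value: nameSchema}, // NamedSchema field names assumed
			},
		},
	}
}
```
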
message SecurityDefinitions {
  repeated NamedSecurityDefinitionsItem additional_properties = 1;
}

message SecurityDefinitionsItem {
  oneof oneof {
    BasicAuthenticationSecurity basic_authentication_security = 1;
    ApiKeySecurity api_key_security = 2;
    Oauth2ImplicitSecurity oauth2_implicit_security = 3;
    Oauth2PasswordSecurity oauth2_password_security = 4;
    Oauth2ApplicationSecurity oauth2_application_security = 5;
    Oauth2AccessCodeSecurity oauth2_access_code_security = 6;
  }
}
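
Going the other way, a consumer would branch on the SecurityDefinitionsItem oneof with a type switch. The wrapper type and field names below assume standard protoc-gen-go output for this file.

```go
package example

import (
	"fmt"

	openapi_v2 "github.com/googleapis/gnostic/OpenAPIv2" // assumed vendored import path
)

// describeSecurity branches on the SecurityDefinitionsItem oneof; each case is
// one of the security messages defined above, wrapped in the per-field struct
// that protoc-gen-go is assumed to generate.
func describeSecurity(item *openapi_v2.SecurityDefinitionsItem) string {
	switch s := item.Oneof.(type) {
	case *openapi_v2.SecurityDefinitionsItem_BasicAuthenticationSecurity:
		return "basic"
	case *openapi_v2.SecurityDefinitionsItem_Oauth2ImplicitSecurity:
		return "oauth2 implicit via " + s.Oauth2ImplicitSecurity.AuthorizationUrl
	case *openapi_v2.SecurityDefinitionsItem_Oauth2PasswordSecurity:
		return "oauth2 password via " + s.Oauth2PasswordSecurity.TokenUrl
	default:
		return fmt.Sprintf("unhandled security definition %T", s)
	}
}
```
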
message SecurityRequirement {
  repeated NamedStringArray additional_properties = 1;
}

message StringArray {
  repeated string value = 1;
}

message Tag {
  string name = 1;
  string description = 2;
  ExternalDocs external_docs = 3;
  repeated NamedAny vendor_extension = 4;
}

message TypeItem {
  repeated string value = 1;
}

// Any property starting with x- is valid.
message VendorExtension {
  repeated NamedAny additional_properties = 1;
}

message Xml {
  string name = 1;
  string namespace = 2;
  string prefix = 3;
  bool attribute = 4;
  bool wrapped = 5;
  repeated NamedAny vendor_extension = 6;
}