update vendor
Signed-off-by: Jess Frazelle <acidburn@microsoft.com>
This commit is contained in:
parent
19a32db84d
commit
94d1cfbfbf
10501 changed files with 2307943 additions and 29279 deletions
2666
vendor/google.golang.org/genproto/googleapis/genomics/v1/annotations.pb.go
generated
vendored
Normal file
2666
vendor/google.golang.org/genproto/googleapis/genomics/v1/annotations.pb.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
203
vendor/google.golang.org/genproto/googleapis/genomics/v1/cigar.pb.go
generated
vendored
Normal file
203
vendor/google.golang.org/genproto/googleapis/genomics/v1/cigar.pb.go
generated
vendored
Normal file
|
@ -0,0 +1,203 @@
|
|||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// source: google/genomics/v1/cigar.proto
|
||||
|
||||
package genomics
|
||||
|
||||
import (
|
||||
fmt "fmt"
|
||||
proto "github.com/golang/protobuf/proto"
|
||||
_ "google.golang.org/genproto/googleapis/api/annotations"
|
||||
math "math"
|
||||
)
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||
|
||||
// Describes the different types of CIGAR alignment operations that exist.
|
||||
// Used wherever CIGAR alignments are used.
|
||||
type CigarUnit_Operation int32
|
||||
|
||||
const (
|
||||
CigarUnit_OPERATION_UNSPECIFIED CigarUnit_Operation = 0
|
||||
// An alignment match indicates that a sequence can be aligned to the
|
||||
// reference without evidence of an INDEL. Unlike the
|
||||
// `SEQUENCE_MATCH` and `SEQUENCE_MISMATCH` operators,
|
||||
// the `ALIGNMENT_MATCH` operator does not indicate whether the
|
||||
// reference and read sequences are an exact match. This operator is
|
||||
// equivalent to SAM's `M`.
|
||||
CigarUnit_ALIGNMENT_MATCH CigarUnit_Operation = 1
|
||||
// The insert operator indicates that the read contains evidence of bases
|
||||
// being inserted into the reference. This operator is equivalent to SAM's
|
||||
// `I`.
|
||||
CigarUnit_INSERT CigarUnit_Operation = 2
|
||||
// The delete operator indicates that the read contains evidence of bases
|
||||
// being deleted from the reference. This operator is equivalent to SAM's
|
||||
// `D`.
|
||||
CigarUnit_DELETE CigarUnit_Operation = 3
|
||||
// The skip operator indicates that this read skips a long segment of the
|
||||
// reference, but the bases have not been deleted. This operator is commonly
|
||||
// used when working with RNA-seq data, where reads may skip long segments
|
||||
// of the reference between exons. This operator is equivalent to SAM's
|
||||
// `N`.
|
||||
CigarUnit_SKIP CigarUnit_Operation = 4
|
||||
// The soft clip operator indicates that bases at the start/end of a read
|
||||
// have not been considered during alignment. This may occur if the majority
|
||||
// of a read maps, except for low quality bases at the start/end of a read.
|
||||
// This operator is equivalent to SAM's `S`. Bases that are soft
|
||||
// clipped will still be stored in the read.
|
||||
CigarUnit_CLIP_SOFT CigarUnit_Operation = 5
|
||||
// The hard clip operator indicates that bases at the start/end of a read
|
||||
// have been omitted from this alignment. This may occur if this linear
|
||||
// alignment is part of a chimeric alignment, or if the read has been
|
||||
// trimmed (for example, during error correction or to trim poly-A tails for
|
||||
// RNA-seq). This operator is equivalent to SAM's `H`.
|
||||
CigarUnit_CLIP_HARD CigarUnit_Operation = 6
|
||||
// The pad operator indicates that there is padding in an alignment. This
|
||||
// operator is equivalent to SAM's `P`.
|
||||
CigarUnit_PAD CigarUnit_Operation = 7
|
||||
// This operator indicates that this portion of the aligned sequence exactly
|
||||
// matches the reference. This operator is equivalent to SAM's `=`.
|
||||
CigarUnit_SEQUENCE_MATCH CigarUnit_Operation = 8
|
||||
// This operator indicates that this portion of the aligned sequence is an
|
||||
// alignment match to the reference, but a sequence mismatch. This can
|
||||
// indicate a SNP or a read error. This operator is equivalent to SAM's
|
||||
// `X`.
|
||||
CigarUnit_SEQUENCE_MISMATCH CigarUnit_Operation = 9
|
||||
)
|
||||
|
||||
var CigarUnit_Operation_name = map[int32]string{
|
||||
0: "OPERATION_UNSPECIFIED",
|
||||
1: "ALIGNMENT_MATCH",
|
||||
2: "INSERT",
|
||||
3: "DELETE",
|
||||
4: "SKIP",
|
||||
5: "CLIP_SOFT",
|
||||
6: "CLIP_HARD",
|
||||
7: "PAD",
|
||||
8: "SEQUENCE_MATCH",
|
||||
9: "SEQUENCE_MISMATCH",
|
||||
}
|
||||
|
||||
var CigarUnit_Operation_value = map[string]int32{
|
||||
"OPERATION_UNSPECIFIED": 0,
|
||||
"ALIGNMENT_MATCH": 1,
|
||||
"INSERT": 2,
|
||||
"DELETE": 3,
|
||||
"SKIP": 4,
|
||||
"CLIP_SOFT": 5,
|
||||
"CLIP_HARD": 6,
|
||||
"PAD": 7,
|
||||
"SEQUENCE_MATCH": 8,
|
||||
"SEQUENCE_MISMATCH": 9,
|
||||
}
|
||||
|
||||
func (x CigarUnit_Operation) String() string {
|
||||
return proto.EnumName(CigarUnit_Operation_name, int32(x))
|
||||
}
|
||||
|
||||
func (CigarUnit_Operation) EnumDescriptor() ([]byte, []int) {
|
||||
return fileDescriptor_353bda9a6ec457b8, []int{0, 0}
|
||||
}
|
||||
|
||||
// A single CIGAR operation.
|
||||
type CigarUnit struct {
|
||||
Operation CigarUnit_Operation `protobuf:"varint,1,opt,name=operation,proto3,enum=google.genomics.v1.CigarUnit_Operation" json:"operation,omitempty"`
|
||||
// The number of genomic bases that the operation runs for. Required.
|
||||
OperationLength int64 `protobuf:"varint,2,opt,name=operation_length,json=operationLength,proto3" json:"operation_length,omitempty"`
|
||||
// `referenceSequence` is only used at mismatches
|
||||
// (`SEQUENCE_MISMATCH`) and deletions (`DELETE`).
|
||||
// Filling this field replaces SAM's MD tag. If the relevant information is
|
||||
// not available, this field is unset.
|
||||
ReferenceSequence string `protobuf:"bytes,3,opt,name=reference_sequence,json=referenceSequence,proto3" json:"reference_sequence,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *CigarUnit) Reset() { *m = CigarUnit{} }
|
||||
func (m *CigarUnit) String() string { return proto.CompactTextString(m) }
|
||||
func (*CigarUnit) ProtoMessage() {}
|
||||
func (*CigarUnit) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_353bda9a6ec457b8, []int{0}
|
||||
}
|
||||
|
||||
func (m *CigarUnit) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_CigarUnit.Unmarshal(m, b)
|
||||
}
|
||||
func (m *CigarUnit) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_CigarUnit.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *CigarUnit) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_CigarUnit.Merge(m, src)
|
||||
}
|
||||
func (m *CigarUnit) XXX_Size() int {
|
||||
return xxx_messageInfo_CigarUnit.Size(m)
|
||||
}
|
||||
func (m *CigarUnit) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_CigarUnit.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_CigarUnit proto.InternalMessageInfo
|
||||
|
||||
func (m *CigarUnit) GetOperation() CigarUnit_Operation {
|
||||
if m != nil {
|
||||
return m.Operation
|
||||
}
|
||||
return CigarUnit_OPERATION_UNSPECIFIED
|
||||
}
|
||||
|
||||
func (m *CigarUnit) GetOperationLength() int64 {
|
||||
if m != nil {
|
||||
return m.OperationLength
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *CigarUnit) GetReferenceSequence() string {
|
||||
if m != nil {
|
||||
return m.ReferenceSequence
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func init() {
|
||||
proto.RegisterEnum("google.genomics.v1.CigarUnit_Operation", CigarUnit_Operation_name, CigarUnit_Operation_value)
|
||||
proto.RegisterType((*CigarUnit)(nil), "google.genomics.v1.CigarUnit")
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("google/genomics/v1/cigar.proto", fileDescriptor_353bda9a6ec457b8) }
|
||||
|
||||
var fileDescriptor_353bda9a6ec457b8 = []byte{
|
||||
// 367 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x51, 0xcf, 0x0e, 0x93, 0x30,
|
||||
0x1c, 0xb6, 0x63, 0x6e, 0xe3, 0x97, 0xb8, 0x75, 0x35, 0x33, 0xd3, 0x18, 0xb3, 0xec, 0xe2, 0x3c,
|
||||
0x08, 0x99, 0xde, 0xf4, 0xc4, 0xa0, 0x73, 0x8d, 0x0c, 0x10, 0xd8, 0xc5, 0x0b, 0x41, 0x52, 0x91,
|
||||
0x64, 0x6b, 0x11, 0x70, 0xaf, 0xe5, 0x1b, 0xf9, 0x1c, 0x1e, 0x0d, 0x30, 0x98, 0x89, 0xde, 0xbe,
|
||||
0x7e, 0xff, 0x9a, 0xfc, 0x3e, 0x78, 0x91, 0x4a, 0x99, 0x9e, 0xb9, 0x9e, 0x72, 0x21, 0x2f, 0x59,
|
||||
0x52, 0xea, 0xd7, 0xad, 0x9e, 0x64, 0x69, 0x5c, 0x68, 0x79, 0x21, 0x2b, 0x49, 0x48, 0xab, 0x6b,
|
||||
0x9d, 0xae, 0x5d, 0xb7, 0xcf, 0x9e, 0xdf, 0x32, 0x71, 0x9e, 0xe9, 0xb1, 0x10, 0xb2, 0x8a, 0xab,
|
||||
0x4c, 0x8a, 0xb2, 0x4d, 0xac, 0x7f, 0x0d, 0x40, 0x35, 0xeb, 0x86, 0x93, 0xc8, 0x2a, 0x42, 0x41,
|
||||
0x95, 0x39, 0x2f, 0x1a, 0xc7, 0x12, 0xad, 0xd0, 0x66, 0xfa, 0xe6, 0xa5, 0xf6, 0x6f, 0xa7, 0xd6,
|
||||
0x27, 0x34, 0xb7, 0xb3, 0xfb, 0xf7, 0x24, 0x79, 0x05, 0xb8, 0x7f, 0x44, 0x67, 0x2e, 0xd2, 0xea,
|
||||
0xdb, 0x72, 0xb0, 0x42, 0x1b, 0xc5, 0x9f, 0xf5, 0xbc, 0xdd, 0xd0, 0xe4, 0x35, 0x90, 0x82, 0x7f,
|
||||
0xe5, 0x05, 0x17, 0x09, 0x8f, 0x4a, 0xfe, 0xfd, 0x47, 0x0d, 0x96, 0xca, 0x0a, 0x6d, 0x54, 0x7f,
|
||||
0xde, 0x2b, 0xc1, 0x4d, 0x58, 0xff, 0x44, 0xa0, 0xf6, 0x5f, 0x92, 0xa7, 0xb0, 0x70, 0x3d, 0xea,
|
||||
0x1b, 0x21, 0x73, 0x9d, 0xe8, 0xe4, 0x04, 0x1e, 0x35, 0xd9, 0x9e, 0x51, 0x0b, 0x3f, 0x20, 0x8f,
|
||||
0x61, 0x66, 0xd8, 0xec, 0x83, 0x73, 0xa4, 0x4e, 0x18, 0x1d, 0x8d, 0xd0, 0x3c, 0x60, 0x44, 0x00,
|
||||
0x46, 0xcc, 0x09, 0xa8, 0x1f, 0xe2, 0x41, 0x8d, 0x2d, 0x6a, 0xd3, 0x90, 0x62, 0x85, 0x4c, 0x60,
|
||||
0x18, 0x7c, 0x64, 0x1e, 0x1e, 0x92, 0x47, 0xa0, 0x9a, 0x36, 0xf3, 0xa2, 0xc0, 0xdd, 0x87, 0xf8,
|
||||
0x61, 0xff, 0x3c, 0x18, 0xbe, 0x85, 0x47, 0x64, 0x0c, 0x8a, 0x67, 0x58, 0x78, 0x4c, 0x08, 0x4c,
|
||||
0x03, 0xfa, 0xe9, 0x44, 0x1d, 0x93, 0xde, 0xca, 0x27, 0x64, 0x01, 0xf3, 0x3b, 0xc7, 0x82, 0x96,
|
||||
0x56, 0x77, 0x1c, 0x9e, 0x24, 0xf2, 0xf2, 0x9f, 0x23, 0xee, 0xa0, 0xb9, 0xa2, 0x57, 0xcf, 0xe0,
|
||||
0xa1, 0xcf, 0xef, 0x3a, 0x87, 0x3c, 0xc7, 0x22, 0xd5, 0x64, 0x91, 0xd6, 0x2b, 0x37, 0x23, 0xe9,
|
||||
0xad, 0x14, 0xe7, 0x59, 0xf9, 0xf7, 0xf2, 0xef, 0x3b, 0xfc, 0x1b, 0xa1, 0x2f, 0xa3, 0xc6, 0xf9,
|
||||
0xf6, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x98, 0xcc, 0xce, 0xde, 0x22, 0x02, 0x00, 0x00,
|
||||
}
|
968
vendor/google.golang.org/genproto/googleapis/genomics/v1/datasets.pb.go
generated
vendored
Normal file
968
vendor/google.golang.org/genproto/googleapis/genomics/v1/datasets.pb.go
generated
vendored
Normal file
|
@ -0,0 +1,968 @@
|
|||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// source: google/genomics/v1/datasets.proto
|
||||
|
||||
package genomics
|
||||
|
||||
import (
|
||||
fmt "fmt"
|
||||
proto "github.com/golang/protobuf/proto"
|
||||
empty "github.com/golang/protobuf/ptypes/empty"
|
||||
timestamp "github.com/golang/protobuf/ptypes/timestamp"
|
||||
_ "google.golang.org/genproto/googleapis/api/annotations"
|
||||
v1 "google.golang.org/genproto/googleapis/iam/v1"
|
||||
field_mask "google.golang.org/genproto/protobuf/field_mask"
|
||||
math "math"
|
||||
)
|
||||
|
||||
import (
|
||||
context "golang.org/x/net/context"
|
||||
grpc "google.golang.org/grpc"
|
||||
)
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||
|
||||
// A Dataset is a collection of genomic data.
|
||||
//
|
||||
// For more genomics resource definitions, see [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
type Dataset struct {
|
||||
// The server-generated dataset ID, unique across all datasets.
|
||||
Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
|
||||
// The Google Cloud project ID that this dataset belongs to.
|
||||
ProjectId string `protobuf:"bytes,2,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
|
||||
// The dataset name.
|
||||
Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
|
||||
// The time this dataset was created, in seconds from the epoch.
|
||||
CreateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Dataset) Reset() { *m = Dataset{} }
|
||||
func (m *Dataset) String() string { return proto.CompactTextString(m) }
|
||||
func (*Dataset) ProtoMessage() {}
|
||||
func (*Dataset) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_ddd0efa223187e29, []int{0}
|
||||
}
|
||||
|
||||
func (m *Dataset) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_Dataset.Unmarshal(m, b)
|
||||
}
|
||||
func (m *Dataset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_Dataset.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *Dataset) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Dataset.Merge(m, src)
|
||||
}
|
||||
func (m *Dataset) XXX_Size() int {
|
||||
return xxx_messageInfo_Dataset.Size(m)
|
||||
}
|
||||
func (m *Dataset) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Dataset.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Dataset proto.InternalMessageInfo
|
||||
|
||||
func (m *Dataset) GetId() string {
|
||||
if m != nil {
|
||||
return m.Id
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Dataset) GetProjectId() string {
|
||||
if m != nil {
|
||||
return m.ProjectId
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Dataset) GetName() string {
|
||||
if m != nil {
|
||||
return m.Name
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Dataset) GetCreateTime() *timestamp.Timestamp {
|
||||
if m != nil {
|
||||
return m.CreateTime
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// The dataset list request.
|
||||
type ListDatasetsRequest struct {
|
||||
// Required. The Google Cloud project ID to list datasets for.
|
||||
ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
|
||||
// The maximum number of results to return in a single page. If unspecified,
|
||||
// defaults to 50. The maximum value is 1024.
|
||||
PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
|
||||
// The continuation token, which is used to page through large result sets.
|
||||
// To get the next page of results, set this parameter to the value of
|
||||
// `nextPageToken` from the previous response.
|
||||
PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *ListDatasetsRequest) Reset() { *m = ListDatasetsRequest{} }
|
||||
func (m *ListDatasetsRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*ListDatasetsRequest) ProtoMessage() {}
|
||||
func (*ListDatasetsRequest) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_ddd0efa223187e29, []int{1}
|
||||
}
|
||||
|
||||
func (m *ListDatasetsRequest) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ListDatasetsRequest.Unmarshal(m, b)
|
||||
}
|
||||
func (m *ListDatasetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_ListDatasetsRequest.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *ListDatasetsRequest) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_ListDatasetsRequest.Merge(m, src)
|
||||
}
|
||||
func (m *ListDatasetsRequest) XXX_Size() int {
|
||||
return xxx_messageInfo_ListDatasetsRequest.Size(m)
|
||||
}
|
||||
func (m *ListDatasetsRequest) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_ListDatasetsRequest.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_ListDatasetsRequest proto.InternalMessageInfo
|
||||
|
||||
func (m *ListDatasetsRequest) GetProjectId() string {
|
||||
if m != nil {
|
||||
return m.ProjectId
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *ListDatasetsRequest) GetPageSize() int32 {
|
||||
if m != nil {
|
||||
return m.PageSize
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *ListDatasetsRequest) GetPageToken() string {
|
||||
if m != nil {
|
||||
return m.PageToken
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// The dataset list response.
|
||||
type ListDatasetsResponse struct {
|
||||
// The list of matching Datasets.
|
||||
Datasets []*Dataset `protobuf:"bytes,1,rep,name=datasets,proto3" json:"datasets,omitempty"`
|
||||
// The continuation token, which is used to page through large result sets.
|
||||
// Provide this value in a subsequent request to return the next page of
|
||||
// results. This field will be empty if there aren't any additional results.
|
||||
NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *ListDatasetsResponse) Reset() { *m = ListDatasetsResponse{} }
|
||||
func (m *ListDatasetsResponse) String() string { return proto.CompactTextString(m) }
|
||||
func (*ListDatasetsResponse) ProtoMessage() {}
|
||||
func (*ListDatasetsResponse) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_ddd0efa223187e29, []int{2}
|
||||
}
|
||||
|
||||
func (m *ListDatasetsResponse) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ListDatasetsResponse.Unmarshal(m, b)
|
||||
}
|
||||
func (m *ListDatasetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_ListDatasetsResponse.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *ListDatasetsResponse) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_ListDatasetsResponse.Merge(m, src)
|
||||
}
|
||||
func (m *ListDatasetsResponse) XXX_Size() int {
|
||||
return xxx_messageInfo_ListDatasetsResponse.Size(m)
|
||||
}
|
||||
func (m *ListDatasetsResponse) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_ListDatasetsResponse.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_ListDatasetsResponse proto.InternalMessageInfo
|
||||
|
||||
func (m *ListDatasetsResponse) GetDatasets() []*Dataset {
|
||||
if m != nil {
|
||||
return m.Datasets
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *ListDatasetsResponse) GetNextPageToken() string {
|
||||
if m != nil {
|
||||
return m.NextPageToken
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
type CreateDatasetRequest struct {
|
||||
// The dataset to be created. Must contain projectId and name.
|
||||
Dataset *Dataset `protobuf:"bytes,1,opt,name=dataset,proto3" json:"dataset,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *CreateDatasetRequest) Reset() { *m = CreateDatasetRequest{} }
|
||||
func (m *CreateDatasetRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*CreateDatasetRequest) ProtoMessage() {}
|
||||
func (*CreateDatasetRequest) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_ddd0efa223187e29, []int{3}
|
||||
}
|
||||
|
||||
func (m *CreateDatasetRequest) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_CreateDatasetRequest.Unmarshal(m, b)
|
||||
}
|
||||
func (m *CreateDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_CreateDatasetRequest.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *CreateDatasetRequest) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_CreateDatasetRequest.Merge(m, src)
|
||||
}
|
||||
func (m *CreateDatasetRequest) XXX_Size() int {
|
||||
return xxx_messageInfo_CreateDatasetRequest.Size(m)
|
||||
}
|
||||
func (m *CreateDatasetRequest) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_CreateDatasetRequest.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_CreateDatasetRequest proto.InternalMessageInfo
|
||||
|
||||
func (m *CreateDatasetRequest) GetDataset() *Dataset {
|
||||
if m != nil {
|
||||
return m.Dataset
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type UpdateDatasetRequest struct {
|
||||
// The ID of the dataset to be updated.
|
||||
DatasetId string `protobuf:"bytes,1,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"`
|
||||
// The new dataset data.
|
||||
Dataset *Dataset `protobuf:"bytes,2,opt,name=dataset,proto3" json:"dataset,omitempty"`
|
||||
// An optional mask specifying which fields to update. At this time, the only
|
||||
// mutable field is [name][google.genomics.v1.Dataset.name]. The only
|
||||
// acceptable value is "name". If unspecified, all mutable fields will be
|
||||
// updated.
|
||||
UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *UpdateDatasetRequest) Reset() { *m = UpdateDatasetRequest{} }
|
||||
func (m *UpdateDatasetRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*UpdateDatasetRequest) ProtoMessage() {}
|
||||
func (*UpdateDatasetRequest) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_ddd0efa223187e29, []int{4}
|
||||
}
|
||||
|
||||
func (m *UpdateDatasetRequest) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_UpdateDatasetRequest.Unmarshal(m, b)
|
||||
}
|
||||
func (m *UpdateDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_UpdateDatasetRequest.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *UpdateDatasetRequest) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_UpdateDatasetRequest.Merge(m, src)
|
||||
}
|
||||
func (m *UpdateDatasetRequest) XXX_Size() int {
|
||||
return xxx_messageInfo_UpdateDatasetRequest.Size(m)
|
||||
}
|
||||
func (m *UpdateDatasetRequest) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_UpdateDatasetRequest.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_UpdateDatasetRequest proto.InternalMessageInfo
|
||||
|
||||
func (m *UpdateDatasetRequest) GetDatasetId() string {
|
||||
if m != nil {
|
||||
return m.DatasetId
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *UpdateDatasetRequest) GetDataset() *Dataset {
|
||||
if m != nil {
|
||||
return m.Dataset
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *UpdateDatasetRequest) GetUpdateMask() *field_mask.FieldMask {
|
||||
if m != nil {
|
||||
return m.UpdateMask
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type DeleteDatasetRequest struct {
|
||||
// The ID of the dataset to be deleted.
|
||||
DatasetId string `protobuf:"bytes,1,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *DeleteDatasetRequest) Reset() { *m = DeleteDatasetRequest{} }
|
||||
func (m *DeleteDatasetRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*DeleteDatasetRequest) ProtoMessage() {}
|
||||
func (*DeleteDatasetRequest) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_ddd0efa223187e29, []int{5}
|
||||
}
|
||||
|
||||
func (m *DeleteDatasetRequest) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_DeleteDatasetRequest.Unmarshal(m, b)
|
||||
}
|
||||
func (m *DeleteDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_DeleteDatasetRequest.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *DeleteDatasetRequest) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_DeleteDatasetRequest.Merge(m, src)
|
||||
}
|
||||
func (m *DeleteDatasetRequest) XXX_Size() int {
|
||||
return xxx_messageInfo_DeleteDatasetRequest.Size(m)
|
||||
}
|
||||
func (m *DeleteDatasetRequest) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_DeleteDatasetRequest.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_DeleteDatasetRequest proto.InternalMessageInfo
|
||||
|
||||
func (m *DeleteDatasetRequest) GetDatasetId() string {
|
||||
if m != nil {
|
||||
return m.DatasetId
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
type UndeleteDatasetRequest struct {
|
||||
// The ID of the dataset to be undeleted.
|
||||
DatasetId string `protobuf:"bytes,1,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *UndeleteDatasetRequest) Reset() { *m = UndeleteDatasetRequest{} }
|
||||
func (m *UndeleteDatasetRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*UndeleteDatasetRequest) ProtoMessage() {}
|
||||
func (*UndeleteDatasetRequest) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_ddd0efa223187e29, []int{6}
|
||||
}
|
||||
|
||||
func (m *UndeleteDatasetRequest) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_UndeleteDatasetRequest.Unmarshal(m, b)
|
||||
}
|
||||
func (m *UndeleteDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_UndeleteDatasetRequest.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *UndeleteDatasetRequest) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_UndeleteDatasetRequest.Merge(m, src)
|
||||
}
|
||||
func (m *UndeleteDatasetRequest) XXX_Size() int {
|
||||
return xxx_messageInfo_UndeleteDatasetRequest.Size(m)
|
||||
}
|
||||
func (m *UndeleteDatasetRequest) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_UndeleteDatasetRequest.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_UndeleteDatasetRequest proto.InternalMessageInfo
|
||||
|
||||
func (m *UndeleteDatasetRequest) GetDatasetId() string {
|
||||
if m != nil {
|
||||
return m.DatasetId
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
type GetDatasetRequest struct {
|
||||
// The ID of the dataset.
|
||||
DatasetId string `protobuf:"bytes,1,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *GetDatasetRequest) Reset() { *m = GetDatasetRequest{} }
|
||||
func (m *GetDatasetRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*GetDatasetRequest) ProtoMessage() {}
|
||||
func (*GetDatasetRequest) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_ddd0efa223187e29, []int{7}
|
||||
}
|
||||
|
||||
func (m *GetDatasetRequest) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_GetDatasetRequest.Unmarshal(m, b)
|
||||
}
|
||||
func (m *GetDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_GetDatasetRequest.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *GetDatasetRequest) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_GetDatasetRequest.Merge(m, src)
|
||||
}
|
||||
func (m *GetDatasetRequest) XXX_Size() int {
|
||||
return xxx_messageInfo_GetDatasetRequest.Size(m)
|
||||
}
|
||||
func (m *GetDatasetRequest) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_GetDatasetRequest.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_GetDatasetRequest proto.InternalMessageInfo
|
||||
|
||||
func (m *GetDatasetRequest) GetDatasetId() string {
|
||||
if m != nil {
|
||||
return m.DatasetId
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func init() {
|
||||
proto.RegisterType((*Dataset)(nil), "google.genomics.v1.Dataset")
|
||||
proto.RegisterType((*ListDatasetsRequest)(nil), "google.genomics.v1.ListDatasetsRequest")
|
||||
proto.RegisterType((*ListDatasetsResponse)(nil), "google.genomics.v1.ListDatasetsResponse")
|
||||
proto.RegisterType((*CreateDatasetRequest)(nil), "google.genomics.v1.CreateDatasetRequest")
|
||||
proto.RegisterType((*UpdateDatasetRequest)(nil), "google.genomics.v1.UpdateDatasetRequest")
|
||||
proto.RegisterType((*DeleteDatasetRequest)(nil), "google.genomics.v1.DeleteDatasetRequest")
|
||||
proto.RegisterType((*UndeleteDatasetRequest)(nil), "google.genomics.v1.UndeleteDatasetRequest")
|
||||
proto.RegisterType((*GetDatasetRequest)(nil), "google.genomics.v1.GetDatasetRequest")
|
||||
}
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ context.Context
|
||||
var _ grpc.ClientConn
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the grpc package it is being compiled against.
|
||||
const _ = grpc.SupportPackageIsVersion4
|
||||
|
||||
// DatasetServiceV1Client is the client API for DatasetServiceV1 service.
|
||||
//
|
||||
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
|
||||
type DatasetServiceV1Client interface {
|
||||
// Lists datasets within a project.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
ListDatasets(ctx context.Context, in *ListDatasetsRequest, opts ...grpc.CallOption) (*ListDatasetsResponse, error)
|
||||
// Creates a new dataset.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
CreateDataset(ctx context.Context, in *CreateDatasetRequest, opts ...grpc.CallOption) (*Dataset, error)
|
||||
// Gets a dataset by ID.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
GetDataset(ctx context.Context, in *GetDatasetRequest, opts ...grpc.CallOption) (*Dataset, error)
|
||||
// Updates a dataset.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
//
|
||||
// This method supports patch semantics.
|
||||
UpdateDataset(ctx context.Context, in *UpdateDatasetRequest, opts ...grpc.CallOption) (*Dataset, error)
|
||||
// Deletes a dataset and all of its contents (all read group sets,
|
||||
// reference sets, variant sets, call sets, annotation sets, etc.)
|
||||
// This is reversible (up to one week after the deletion) via
|
||||
// the
|
||||
// [datasets.undelete][google.genomics.v1.DatasetServiceV1.UndeleteDataset]
|
||||
// operation.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
DeleteDataset(ctx context.Context, in *DeleteDatasetRequest, opts ...grpc.CallOption) (*empty.Empty, error)
|
||||
// Undeletes a dataset by restoring a dataset which was deleted via this API.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
//
|
||||
// This operation is only possible for a week after the deletion occurred.
|
||||
UndeleteDataset(ctx context.Context, in *UndeleteDatasetRequest, opts ...grpc.CallOption) (*Dataset, error)
|
||||
// Sets the access control policy on the specified dataset. Replaces any
|
||||
// existing policy.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
//
|
||||
// See <a href="/iam/docs/managing-policies#setting_a_policy">Setting a
|
||||
// Policy</a> for more information.
|
||||
SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error)
|
||||
// Gets the access control policy for the dataset. This is empty if the
|
||||
// policy or resource does not exist.
|
||||
//
|
||||
// See <a href="/iam/docs/managing-policies#getting_a_policy">Getting a
|
||||
// Policy</a> for more information.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error)
|
||||
// Returns permissions that a caller has on the specified resource.
|
||||
// See <a href="/iam/docs/managing-policies#testing_permissions">Testing
|
||||
// Permissions</a> for more information.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error)
|
||||
}
|
||||
|
||||
type datasetServiceV1Client struct {
|
||||
cc *grpc.ClientConn
|
||||
}
|
||||
|
||||
func NewDatasetServiceV1Client(cc *grpc.ClientConn) DatasetServiceV1Client {
|
||||
return &datasetServiceV1Client{cc}
|
||||
}
|
||||
|
||||
func (c *datasetServiceV1Client) ListDatasets(ctx context.Context, in *ListDatasetsRequest, opts ...grpc.CallOption) (*ListDatasetsResponse, error) {
|
||||
out := new(ListDatasetsResponse)
|
||||
err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/ListDatasets", in, out, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *datasetServiceV1Client) CreateDataset(ctx context.Context, in *CreateDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) {
|
||||
out := new(Dataset)
|
||||
err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/CreateDataset", in, out, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *datasetServiceV1Client) GetDataset(ctx context.Context, in *GetDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) {
|
||||
out := new(Dataset)
|
||||
err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/GetDataset", in, out, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *datasetServiceV1Client) UpdateDataset(ctx context.Context, in *UpdateDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) {
|
||||
out := new(Dataset)
|
||||
err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/UpdateDataset", in, out, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *datasetServiceV1Client) DeleteDataset(ctx context.Context, in *DeleteDatasetRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
|
||||
out := new(empty.Empty)
|
||||
err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/DeleteDataset", in, out, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *datasetServiceV1Client) UndeleteDataset(ctx context.Context, in *UndeleteDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) {
|
||||
out := new(Dataset)
|
||||
err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/UndeleteDataset", in, out, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *datasetServiceV1Client) SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) {
|
||||
out := new(v1.Policy)
|
||||
err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/SetIamPolicy", in, out, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *datasetServiceV1Client) GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) {
|
||||
out := new(v1.Policy)
|
||||
err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/GetIamPolicy", in, out, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *datasetServiceV1Client) TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) {
|
||||
out := new(v1.TestIamPermissionsResponse)
|
||||
err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/TestIamPermissions", in, out, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
// DatasetServiceV1Server is the server API for DatasetServiceV1 service.
|
||||
type DatasetServiceV1Server interface {
|
||||
// Lists datasets within a project.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
ListDatasets(context.Context, *ListDatasetsRequest) (*ListDatasetsResponse, error)
|
||||
// Creates a new dataset.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
CreateDataset(context.Context, *CreateDatasetRequest) (*Dataset, error)
|
||||
// Gets a dataset by ID.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
GetDataset(context.Context, *GetDatasetRequest) (*Dataset, error)
|
||||
// Updates a dataset.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
//
|
||||
// This method supports patch semantics.
|
||||
UpdateDataset(context.Context, *UpdateDatasetRequest) (*Dataset, error)
|
||||
// Deletes a dataset and all of its contents (all read group sets,
|
||||
// reference sets, variant sets, call sets, annotation sets, etc.)
|
||||
// This is reversible (up to one week after the deletion) via
|
||||
// the
|
||||
// [datasets.undelete][google.genomics.v1.DatasetServiceV1.UndeleteDataset]
|
||||
// operation.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
DeleteDataset(context.Context, *DeleteDatasetRequest) (*empty.Empty, error)
|
||||
// Undeletes a dataset by restoring a dataset which was deleted via this API.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
//
|
||||
// This operation is only possible for a week after the deletion occurred.
|
||||
UndeleteDataset(context.Context, *UndeleteDatasetRequest) (*Dataset, error)
|
||||
// Sets the access control policy on the specified dataset. Replaces any
|
||||
// existing policy.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
//
|
||||
// See <a href="/iam/docs/managing-policies#setting_a_policy">Setting a
|
||||
// Policy</a> for more information.
|
||||
SetIamPolicy(context.Context, *v1.SetIamPolicyRequest) (*v1.Policy, error)
|
||||
// Gets the access control policy for the dataset. This is empty if the
|
||||
// policy or resource does not exist.
|
||||
//
|
||||
// See <a href="/iam/docs/managing-policies#getting_a_policy">Getting a
|
||||
// Policy</a> for more information.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
GetIamPolicy(context.Context, *v1.GetIamPolicyRequest) (*v1.Policy, error)
|
||||
// Returns permissions that a caller has on the specified resource.
|
||||
// See <a href="/iam/docs/managing-policies#testing_permissions">Testing
|
||||
// Permissions</a> for more information.
|
||||
//
|
||||
// For the definitions of datasets and other genomics resources, see
|
||||
// [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
TestIamPermissions(context.Context, *v1.TestIamPermissionsRequest) (*v1.TestIamPermissionsResponse, error)
|
||||
}
|
||||
|
||||
func RegisterDatasetServiceV1Server(s *grpc.Server, srv DatasetServiceV1Server) {
|
||||
s.RegisterService(&_DatasetServiceV1_serviceDesc, srv)
|
||||
}
|
||||
|
||||
func _DatasetServiceV1_ListDatasets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(ListDatasetsRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(DatasetServiceV1Server).ListDatasets(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: "/google.genomics.v1.DatasetServiceV1/ListDatasets",
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(DatasetServiceV1Server).ListDatasets(ctx, req.(*ListDatasetsRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _DatasetServiceV1_CreateDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(CreateDatasetRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(DatasetServiceV1Server).CreateDataset(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: "/google.genomics.v1.DatasetServiceV1/CreateDataset",
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(DatasetServiceV1Server).CreateDataset(ctx, req.(*CreateDatasetRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _DatasetServiceV1_GetDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(GetDatasetRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(DatasetServiceV1Server).GetDataset(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: "/google.genomics.v1.DatasetServiceV1/GetDataset",
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(DatasetServiceV1Server).GetDataset(ctx, req.(*GetDatasetRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _DatasetServiceV1_UpdateDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(UpdateDatasetRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(DatasetServiceV1Server).UpdateDataset(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: "/google.genomics.v1.DatasetServiceV1/UpdateDataset",
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(DatasetServiceV1Server).UpdateDataset(ctx, req.(*UpdateDatasetRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _DatasetServiceV1_DeleteDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(DeleteDatasetRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(DatasetServiceV1Server).DeleteDataset(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: "/google.genomics.v1.DatasetServiceV1/DeleteDataset",
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(DatasetServiceV1Server).DeleteDataset(ctx, req.(*DeleteDatasetRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _DatasetServiceV1_UndeleteDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(UndeleteDatasetRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(DatasetServiceV1Server).UndeleteDataset(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: "/google.genomics.v1.DatasetServiceV1/UndeleteDataset",
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(DatasetServiceV1Server).UndeleteDataset(ctx, req.(*UndeleteDatasetRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _DatasetServiceV1_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(v1.SetIamPolicyRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(DatasetServiceV1Server).SetIamPolicy(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: "/google.genomics.v1.DatasetServiceV1/SetIamPolicy",
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(DatasetServiceV1Server).SetIamPolicy(ctx, req.(*v1.SetIamPolicyRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _DatasetServiceV1_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(v1.GetIamPolicyRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(DatasetServiceV1Server).GetIamPolicy(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: "/google.genomics.v1.DatasetServiceV1/GetIamPolicy",
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(DatasetServiceV1Server).GetIamPolicy(ctx, req.(*v1.GetIamPolicyRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _DatasetServiceV1_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(v1.TestIamPermissionsRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(DatasetServiceV1Server).TestIamPermissions(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: "/google.genomics.v1.DatasetServiceV1/TestIamPermissions",
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(DatasetServiceV1Server).TestIamPermissions(ctx, req.(*v1.TestIamPermissionsRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
var _DatasetServiceV1_serviceDesc = grpc.ServiceDesc{
|
||||
ServiceName: "google.genomics.v1.DatasetServiceV1",
|
||||
HandlerType: (*DatasetServiceV1Server)(nil),
|
||||
Methods: []grpc.MethodDesc{
|
||||
{
|
||||
MethodName: "ListDatasets",
|
||||
Handler: _DatasetServiceV1_ListDatasets_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "CreateDataset",
|
||||
Handler: _DatasetServiceV1_CreateDataset_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "GetDataset",
|
||||
Handler: _DatasetServiceV1_GetDataset_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "UpdateDataset",
|
||||
Handler: _DatasetServiceV1_UpdateDataset_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "DeleteDataset",
|
||||
Handler: _DatasetServiceV1_DeleteDataset_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "UndeleteDataset",
|
||||
Handler: _DatasetServiceV1_UndeleteDataset_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "SetIamPolicy",
|
||||
Handler: _DatasetServiceV1_SetIamPolicy_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "GetIamPolicy",
|
||||
Handler: _DatasetServiceV1_GetIamPolicy_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "TestIamPermissions",
|
||||
Handler: _DatasetServiceV1_TestIamPermissions_Handler,
|
||||
},
|
||||
},
|
||||
Streams: []grpc.StreamDesc{},
|
||||
Metadata: "google/genomics/v1/datasets.proto",
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("google/genomics/v1/datasets.proto", fileDescriptor_ddd0efa223187e29) }
|
||||
|
||||
var fileDescriptor_ddd0efa223187e29 = []byte{
|
||||
// 786 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0xd1, 0x4e, 0x13, 0x4d,
|
||||
0x14, 0xce, 0x16, 0xfe, 0x1f, 0x7a, 0xa0, 0xa0, 0x63, 0xc5, 0xda, 0x8a, 0x96, 0x8d, 0x42, 0xad,
|
||||
0xba, 0x4d, 0x6b, 0x08, 0x49, 0x89, 0x37, 0x88, 0x12, 0x12, 0x49, 0x9a, 0x02, 0x5e, 0x78, 0xd3,
|
||||
0x0c, 0xdd, 0xa1, 0x8e, 0x74, 0x77, 0xd6, 0x9d, 0x29, 0x28, 0xc8, 0x0d, 0x77, 0x5c, 0xfb, 0x00,
|
||||
0x26, 0xde, 0xf9, 0x3c, 0xbe, 0x82, 0x0f, 0xe1, 0xa5, 0x99, 0xd9, 0xd9, 0x76, 0xdb, 0x2e, 0x05,
|
||||
0x8c, 0x77, 0xdb, 0x73, 0xbe, 0x73, 0xbe, 0xef, 0xcc, 0xf9, 0x76, 0xba, 0xb0, 0xd0, 0x62, 0xac,
|
||||
0xd5, 0x26, 0xa5, 0x16, 0x71, 0x99, 0x43, 0x9b, 0xbc, 0x74, 0x58, 0x2e, 0xd9, 0x58, 0x60, 0x4e,
|
||||
0x04, 0xb7, 0x3c, 0x9f, 0x09, 0x86, 0x50, 0x00, 0xb1, 0x42, 0x88, 0x75, 0x58, 0xce, 0xde, 0xd3,
|
||||
0x65, 0xd8, 0xa3, 0x25, 0xec, 0xba, 0x4c, 0x60, 0x41, 0x99, 0xab, 0x2b, 0xb2, 0xf7, 0x75, 0x96,
|
||||
0x62, 0x47, 0xf6, 0xa3, 0xd8, 0x69, 0x78, 0xac, 0x4d, 0x9b, 0x9f, 0x75, 0x3e, 0xdb, 0x9f, 0xef,
|
||||
0xcb, 0xe5, 0x74, 0x4e, 0xfd, 0xda, 0xeb, 0xec, 0x97, 0x88, 0xe3, 0x89, 0x30, 0x99, 0x1f, 0x4c,
|
||||
0xee, 0x53, 0xd2, 0xb6, 0x1b, 0x0e, 0xe6, 0x07, 0x1a, 0xf1, 0x60, 0x10, 0x21, 0xa8, 0x43, 0xb8,
|
||||
0xc0, 0x8e, 0x17, 0x00, 0xcc, 0x73, 0x03, 0x26, 0xd6, 0x83, 0x01, 0xd1, 0x0c, 0x24, 0xa8, 0x9d,
|
||||
0x31, 0xf2, 0x46, 0x21, 0x59, 0x4f, 0x50, 0x1b, 0xcd, 0x03, 0x78, 0x3e, 0xfb, 0x40, 0x9a, 0xa2,
|
||||
0x41, 0xed, 0x4c, 0x42, 0xc5, 0x93, 0x3a, 0xb2, 0x69, 0x23, 0x04, 0xe3, 0x2e, 0x76, 0x48, 0x66,
|
||||
0x4c, 0x25, 0xd4, 0x33, 0x5a, 0x85, 0xa9, 0xa6, 0x4f, 0xb0, 0x20, 0x0d, 0x49, 0x94, 0x19, 0xcf,
|
||||
0x1b, 0x85, 0xa9, 0x4a, 0xd6, 0xd2, 0x47, 0x16, 0xaa, 0xb0, 0x76, 0x42, 0x15, 0x75, 0x08, 0xe0,
|
||||
0x32, 0x60, 0x7a, 0x70, 0xeb, 0x0d, 0xe5, 0x42, 0xcb, 0xe1, 0x75, 0xf2, 0xb1, 0x43, 0xb8, 0x18,
|
||||
0x90, 0x61, 0x0c, 0xca, 0xc8, 0x41, 0xd2, 0xc3, 0x2d, 0xd2, 0xe0, 0xf4, 0x98, 0x28, 0x91, 0xff,
|
||||
0xd5, 0x27, 0x65, 0x60, 0x9b, 0x1e, 0x13, 0x55, 0x2b, 0x93, 0x82, 0x1d, 0x10, 0x57, 0x2b, 0x55,
|
||||
0xf0, 0x1d, 0x19, 0x30, 0x8f, 0x20, 0xdd, 0xcf, 0xc8, 0x3d, 0xe6, 0x72, 0x82, 0x56, 0x60, 0x32,
|
||||
0xdc, 0x7a, 0xc6, 0xc8, 0x8f, 0x15, 0xa6, 0x2a, 0x39, 0x6b, 0x78, 0xed, 0x96, 0xae, 0xab, 0x77,
|
||||
0xc1, 0x68, 0x11, 0x66, 0x5d, 0xf2, 0x49, 0x34, 0x22, 0xa4, 0xc1, 0xb9, 0xa5, 0x64, 0xb8, 0xd6,
|
||||
0x25, 0xde, 0x82, 0xf4, 0x4b, 0x35, 0x78, 0xd8, 0x42, 0xcf, 0xba, 0x0c, 0x13, 0xba, 0x97, 0x1a,
|
||||
0xf4, 0x12, 0xde, 0x10, 0x6b, 0xfe, 0x30, 0x20, 0xbd, 0xeb, 0xd9, 0xc3, 0xfd, 0xe6, 0x01, 0x34,
|
||||
0x26, 0x72, 0x76, 0x3a, 0xb2, 0x69, 0x47, 0xe9, 0x12, 0x57, 0xa7, 0x93, 0x5b, 0xee, 0x28, 0x36,
|
||||
0x65, 0x35, 0x75, 0xac, 0x71, 0x5b, 0x7e, 0x2d, 0xdd, 0xb8, 0x85, 0xf9, 0x41, 0x1d, 0x02, 0xb8,
|
||||
0x7c, 0x36, 0x97, 0x21, 0xbd, 0x4e, 0xda, 0xe4, 0x9a, 0x52, 0xcd, 0x15, 0x98, 0xdb, 0x75, 0xed,
|
||||
0xbf, 0x28, 0xac, 0xc0, 0xcd, 0x0d, 0x22, 0xae, 0x55, 0x53, 0xf9, 0x96, 0x84, 0x1b, 0xba, 0x62,
|
||||
0x9b, 0xf8, 0x87, 0xb4, 0x49, 0xde, 0x96, 0xd1, 0x11, 0x4c, 0x47, 0xcd, 0x82, 0x96, 0xe2, 0xce,
|
||||
0x2a, 0xc6, 0xc0, 0xd9, 0xc2, 0xe5, 0xc0, 0xc0, 0x77, 0x66, 0xfa, 0xec, 0xe7, 0xaf, 0xaf, 0x89,
|
||||
0x19, 0x34, 0x1d, 0xbd, 0x77, 0x50, 0x07, 0x52, 0x7d, 0x66, 0x41, 0xb1, 0x0d, 0xe3, 0xfc, 0x94,
|
||||
0x1d, 0xb5, 0x4f, 0x73, 0x5e, 0xb1, 0xdd, 0x31, 0xfb, 0xd8, 0xaa, 0xdd, 0x2d, 0x73, 0x80, 0xde,
|
||||
0xc1, 0xa1, 0x47, 0x71, 0x9d, 0x86, 0x0e, 0x76, 0x34, 0xe1, 0x82, 0x22, 0xcc, 0xa1, 0xbb, 0x51,
|
||||
0xc2, 0xd2, 0x49, 0x6f, 0x13, 0xa7, 0xe8, 0xcc, 0x80, 0x54, 0x9f, 0x93, 0xe3, 0x87, 0x8d, 0x33,
|
||||
0xfb, 0x68, 0xee, 0xa2, 0xe2, 0x7e, 0x58, 0xb9, 0x98, 0xbb, 0x37, 0xb9, 0x80, 0x54, 0x9f, 0x45,
|
||||
0xe3, 0x35, 0xc4, 0xb9, 0x38, 0x3b, 0x37, 0xf4, 0x16, 0xbc, 0x92, 0x17, 0x76, 0x38, 0x7a, 0x71,
|
||||
0xc4, 0xe8, 0xe7, 0x06, 0xcc, 0x0e, 0x58, 0x1c, 0x15, 0x63, 0x87, 0x8f, 0x7d, 0x0f, 0x46, 0x8f,
|
||||
0xff, 0x4c, 0xf1, 0x2f, 0x99, 0xe6, 0xc5, 0xe3, 0x77, 0x74, 0xdb, 0xaa, 0x51, 0x44, 0x5f, 0x60,
|
||||
0x7a, 0x9b, 0x88, 0x4d, 0xec, 0xd4, 0xd4, 0x9f, 0x11, 0x32, 0xc3, 0xde, 0x14, 0x3b, 0xb2, 0x6d,
|
||||
0x34, 0x19, 0xf2, 0xdf, 0x1e, 0xc0, 0x04, 0x59, 0xb3, 0xac, 0x98, 0x9f, 0x98, 0x8b, 0x92, 0xf9,
|
||||
0xc4, 0x27, 0x9c, 0x75, 0xfc, 0x26, 0x79, 0xd1, 0xd5, 0x50, 0x3c, 0xad, 0xf2, 0x48, 0x37, 0xcd,
|
||||
0xbe, 0x31, 0x8a, 0x7d, 0xe3, 0x9f, 0xb2, 0xb7, 0x06, 0xd8, 0xbf, 0x1b, 0x80, 0x76, 0x08, 0x57,
|
||||
0x41, 0xe2, 0x3b, 0x94, 0x73, 0xf9, 0x5f, 0xde, 0xf3, 0x80, 0x26, 0x18, 0x86, 0x84, 0x52, 0x1e,
|
||||
0x5f, 0x01, 0xa9, 0x5f, 0xf8, 0x15, 0x25, 0xaf, 0x6c, 0x3e, 0xbd, 0x58, 0x9e, 0x18, 0xaa, 0xae,
|
||||
0x1a, 0xc5, 0xb5, 0xf7, 0x30, 0xd7, 0x64, 0x4e, 0xcc, 0xc6, 0xd7, 0x52, 0xe1, 0xad, 0x52, 0x93,
|
||||
0x0e, 0xac, 0x19, 0xef, 0xaa, 0x21, 0x88, 0xb5, 0xb1, 0xdb, 0xb2, 0x98, 0xdf, 0x92, 0x9f, 0x37,
|
||||
0xca, 0x9f, 0xa5, 0x20, 0x85, 0x3d, 0xca, 0xa3, 0x9f, 0x3c, 0xab, 0xe1, 0xf3, 0x6f, 0xc3, 0xd8,
|
||||
0xfb, 0x5f, 0x21, 0x9f, 0xff, 0x09, 0x00, 0x00, 0xff, 0xff, 0x87, 0x48, 0x07, 0xbb, 0x1b, 0x09,
|
||||
0x00, 0x00,
|
||||
}
|
246
vendor/google.golang.org/genproto/googleapis/genomics/v1/operations.pb.go
generated
vendored
Normal file
246
vendor/google.golang.org/genproto/googleapis/genomics/v1/operations.pb.go
generated
vendored
Normal file
|
@ -0,0 +1,246 @@
|
|||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// source: google/genomics/v1/operations.proto
|
||||
|
||||
package genomics
|
||||
|
||||
import (
|
||||
fmt "fmt"
|
||||
proto "github.com/golang/protobuf/proto"
|
||||
any "github.com/golang/protobuf/ptypes/any"
|
||||
timestamp "github.com/golang/protobuf/ptypes/timestamp"
|
||||
_ "google.golang.org/genproto/googleapis/api/annotations"
|
||||
math "math"
|
||||
)
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||
|
||||
// Metadata describing an [Operation][google.longrunning.Operation].
|
||||
type OperationMetadata struct {
|
||||
// The Google Cloud Project in which the job is scoped.
|
||||
ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
|
||||
// The time at which the job was submitted to the Genomics service.
|
||||
CreateTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"`
|
||||
// The time at which the job began to run.
|
||||
StartTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"`
|
||||
// The time at which the job stopped running.
|
||||
EndTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"`
|
||||
// The original request that started the operation. Note that this will be in
// the current version of the API. If the operation was started with the
// v1beta2 API and a GetOperation is performed on the v1 API, a v1 request
// will be returned.
|
||||
Request *any.Any `protobuf:"bytes,5,opt,name=request,proto3" json:"request,omitempty"`
|
||||
// Optional event messages that were generated during the job's execution.
|
||||
// This also contains any warnings that were generated during import
|
||||
// or export.
|
||||
Events []*OperationEvent `protobuf:"bytes,6,rep,name=events,proto3" json:"events,omitempty"`
|
||||
// This field is deprecated. Use `labels` instead. Optionally provided by the
|
||||
// caller when submitting the request that creates the operation.
|
||||
ClientId string `protobuf:"bytes,7,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty"`
|
||||
// Runtime metadata on this Operation.
|
||||
RuntimeMetadata *any.Any `protobuf:"bytes,8,opt,name=runtime_metadata,json=runtimeMetadata,proto3" json:"runtime_metadata,omitempty"`
|
||||
// Optionally provided by the caller when submitting the request that creates
|
||||
// the operation.
|
||||
Labels map[string]string `protobuf:"bytes,9,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *OperationMetadata) Reset() { *m = OperationMetadata{} }
|
||||
func (m *OperationMetadata) String() string { return proto.CompactTextString(m) }
|
||||
func (*OperationMetadata) ProtoMessage() {}
|
||||
func (*OperationMetadata) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_ab6c330828363542, []int{0}
|
||||
}
|
||||
|
||||
func (m *OperationMetadata) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_OperationMetadata.Unmarshal(m, b)
|
||||
}
|
||||
func (m *OperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_OperationMetadata.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *OperationMetadata) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_OperationMetadata.Merge(m, src)
|
||||
}
|
||||
func (m *OperationMetadata) XXX_Size() int {
|
||||
return xxx_messageInfo_OperationMetadata.Size(m)
|
||||
}
|
||||
func (m *OperationMetadata) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_OperationMetadata.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_OperationMetadata proto.InternalMessageInfo
|
||||
|
||||
func (m *OperationMetadata) GetProjectId() string {
|
||||
if m != nil {
|
||||
return m.ProjectId
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *OperationMetadata) GetCreateTime() *timestamp.Timestamp {
|
||||
if m != nil {
|
||||
return m.CreateTime
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *OperationMetadata) GetStartTime() *timestamp.Timestamp {
|
||||
if m != nil {
|
||||
return m.StartTime
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *OperationMetadata) GetEndTime() *timestamp.Timestamp {
|
||||
if m != nil {
|
||||
return m.EndTime
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *OperationMetadata) GetRequest() *any.Any {
|
||||
if m != nil {
|
||||
return m.Request
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *OperationMetadata) GetEvents() []*OperationEvent {
|
||||
if m != nil {
|
||||
return m.Events
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *OperationMetadata) GetClientId() string {
|
||||
if m != nil {
|
||||
return m.ClientId
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *OperationMetadata) GetRuntimeMetadata() *any.Any {
|
||||
if m != nil {
|
||||
return m.RuntimeMetadata
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *OperationMetadata) GetLabels() map[string]string {
|
||||
if m != nil {
|
||||
return m.Labels
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// An event that occurred during an [Operation][google.longrunning.Operation].
|
||||
type OperationEvent struct {
|
||||
// Optional time of when event started.
|
||||
StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"`
|
||||
// Optional time of when event finished. An event can have a start time and no
|
||||
// finish time. If an event has a finish time, there must be a start time.
|
||||
EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"`
|
||||
// Required description of event.
|
||||
Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *OperationEvent) Reset() { *m = OperationEvent{} }
|
||||
func (m *OperationEvent) String() string { return proto.CompactTextString(m) }
|
||||
func (*OperationEvent) ProtoMessage() {}
|
||||
func (*OperationEvent) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_ab6c330828363542, []int{1}
|
||||
}
|
||||
|
||||
func (m *OperationEvent) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_OperationEvent.Unmarshal(m, b)
|
||||
}
|
||||
func (m *OperationEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_OperationEvent.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *OperationEvent) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_OperationEvent.Merge(m, src)
|
||||
}
|
||||
func (m *OperationEvent) XXX_Size() int {
|
||||
return xxx_messageInfo_OperationEvent.Size(m)
|
||||
}
|
||||
func (m *OperationEvent) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_OperationEvent.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_OperationEvent proto.InternalMessageInfo
|
||||
|
||||
func (m *OperationEvent) GetStartTime() *timestamp.Timestamp {
|
||||
if m != nil {
|
||||
return m.StartTime
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *OperationEvent) GetEndTime() *timestamp.Timestamp {
|
||||
if m != nil {
|
||||
return m.EndTime
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *OperationEvent) GetDescription() string {
|
||||
if m != nil {
|
||||
return m.Description
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func init() {
|
||||
proto.RegisterType((*OperationMetadata)(nil), "google.genomics.v1.OperationMetadata")
|
||||
proto.RegisterMapType((map[string]string)(nil), "google.genomics.v1.OperationMetadata.LabelsEntry")
|
||||
proto.RegisterType((*OperationEvent)(nil), "google.genomics.v1.OperationEvent")
|
||||
}
|
||||
|
||||
func init() {
|
||||
proto.RegisterFile("google/genomics/v1/operations.proto", fileDescriptor_ab6c330828363542)
|
||||
}
|
||||
|
||||
var fileDescriptor_ab6c330828363542 = []byte{
|
||||
// 456 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x93, 0x41, 0x6f, 0xd3, 0x30,
|
||||
0x14, 0xc7, 0xe5, 0x76, 0x6b, 0x9b, 0x17, 0x89, 0x0d, 0x6b, 0x42, 0xa1, 0x80, 0xa8, 0xca, 0xa5,
|
||||
0x27, 0x47, 0x1d, 0x42, 0x62, 0xdd, 0x01, 0x31, 0x69, 0x87, 0x4a, 0x20, 0xa6, 0x88, 0x13, 0x97,
|
||||
0xca, 0x4d, 0x1e, 0x51, 0x46, 0x62, 0x07, 0xdb, 0xad, 0xd4, 0xef, 0xc3, 0x17, 0xe0, 0xdb, 0x71,
|
||||
0x44, 0xb1, 0x9d, 0x2a, 0x6c, 0x68, 0x45, 0xdc, 0xec, 0xf7, 0xfe, 0x3f, 0xfb, 0x9f, 0xf7, 0x8f,
|
||||
0xe1, 0x55, 0x2e, 0x65, 0x5e, 0x62, 0x9c, 0xa3, 0x90, 0x55, 0x91, 0xea, 0x78, 0x3b, 0x8f, 0x65,
|
||||
0x8d, 0x8a, 0x9b, 0x42, 0x0a, 0xcd, 0x6a, 0x25, 0x8d, 0xa4, 0xd4, 0x89, 0x58, 0x2b, 0x62, 0xdb,
|
||||
0xf9, 0xf8, 0xb9, 0x07, 0x79, 0x5d, 0xc4, 0x5c, 0x08, 0x69, 0xba, 0xc4, 0xf8, 0xa9, 0xef, 0xda,
|
||||
0xdd, 0x7a, 0xf3, 0x35, 0xe6, 0x62, 0xe7, 0x5b, 0x2f, 0xef, 0xb6, 0x4c, 0x51, 0xa1, 0x36, 0xbc,
|
||||
0xaa, 0x9d, 0x60, 0xfa, 0xf3, 0x08, 0x1e, 0x7f, 0x6a, 0x2d, 0x7c, 0x44, 0xc3, 0x33, 0x6e, 0x38,
|
||||
0x7d, 0x01, 0x50, 0x2b, 0x79, 0x8b, 0xa9, 0x59, 0x15, 0x59, 0x44, 0x26, 0x64, 0x16, 0x24, 0x81,
|
||||
0xaf, 0x2c, 0x33, 0x7a, 0x09, 0x61, 0xaa, 0x90, 0x1b, 0x5c, 0x35, 0xc7, 0x45, 0xbd, 0x09, 0x99,
|
||||
0x85, 0xe7, 0x63, 0xe6, 0x8d, 0xb7, 0x77, 0xb1, 0xcf, 0xed, 0x5d, 0x09, 0x38, 0x79, 0x53, 0xa0,
|
||||
0x17, 0x00, 0xda, 0x70, 0x65, 0x1c, 0xdb, 0x3f, 0xc8, 0x06, 0x56, 0x6d, 0xd1, 0x37, 0x30, 0x42,
|
||||
0x91, 0x39, 0xf0, 0xe8, 0x20, 0x38, 0x44, 0x91, 0x59, 0x8c, 0xc1, 0x50, 0xe1, 0xf7, 0x0d, 0x6a,
|
||||
0x13, 0x1d, 0x5b, 0xea, 0xec, 0x1e, 0xf5, 0x5e, 0xec, 0x92, 0x56, 0x44, 0x17, 0x30, 0xc0, 0x2d,
|
||||
0x0a, 0xa3, 0xa3, 0xc1, 0xa4, 0x3f, 0x0b, 0xcf, 0xa7, 0xec, 0x7e, 0x24, 0x6c, 0x3f, 0xb4, 0xeb,
|
||||
0x46, 0x9a, 0x78, 0x82, 0x3e, 0x83, 0x20, 0x2d, 0x0b, 0x14, 0x76, 0x70, 0x43, 0x3b, 0xb8, 0x91,
|
||||
0x2b, 0x2c, 0x33, 0xfa, 0x0e, 0x4e, 0xd5, 0x46, 0x34, 0xf6, 0x57, 0x95, 0x1f, 0x75, 0x34, 0x7a,
|
||||
0xc0, 0xd1, 0x89, 0x57, 0xef, 0x73, 0x59, 0xc2, 0xa0, 0xe4, 0x6b, 0x2c, 0x75, 0x14, 0x58, 0x67,
|
||||
0xf3, 0x07, 0x9d, 0xb5, 0x18, 0xfb, 0x60, 0x99, 0x6b, 0x61, 0xd4, 0x2e, 0xf1, 0x07, 0x8c, 0x2f,
|
||||
0x20, 0xec, 0x94, 0xe9, 0x29, 0xf4, 0xbf, 0xe1, 0xce, 0x47, 0xdd, 0x2c, 0xe9, 0x19, 0x1c, 0x6f,
|
||||
0x79, 0xb9, 0x71, 0xf1, 0x06, 0x89, 0xdb, 0x2c, 0x7a, 0x6f, 0xc9, 0xf4, 0x07, 0x81, 0x47, 0x7f,
|
||||
0x7e, 0xfe, 0x9d, 0x50, 0xc9, 0xff, 0x86, 0xda, 0xfb, 0xf7, 0x50, 0x27, 0x10, 0x66, 0xa8, 0x53,
|
||||
0x55, 0xd4, 0x8d, 0x0b, 0xfb, 0x1f, 0x05, 0x49, 0xb7, 0x74, 0x75, 0x0b, 0x4f, 0x52, 0x59, 0xfd,
|
||||
0x65, 0x42, 0x57, 0x27, 0x7b, 0xf7, 0xfa, 0xa6, 0xb9, 0xe2, 0x86, 0x7c, 0x59, 0xb4, 0x32, 0x59,
|
||||
0x72, 0x91, 0x33, 0xa9, 0xf2, 0xe6, 0x95, 0x5a, 0x03, 0xb1, 0x6b, 0xf1, 0xba, 0xd0, 0xdd, 0x97,
|
||||
0x7b, 0xd9, 0xae, 0x7f, 0x11, 0xb2, 0x1e, 0x58, 0xe5, 0xeb, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff,
|
||||
0x6a, 0xf6, 0xa8, 0x9a, 0xe2, 0x03, 0x00, 0x00,
|
||||
}
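A minimal sketch, not part of the generated file above, of how the OperationMetadata message it defines might be packed into and unpacked from the metadata Any of a long-running operation. It assumes the longrunning package from the same genproto tree and the legacy ptypes Any helpers; the operation value is a placeholder built inline rather than one returned by the Operations API.

package main

import (
	"fmt"
	"log"

	"github.com/golang/protobuf/ptypes"
	genomics "google.golang.org/genproto/googleapis/genomics/v1"
	lrpb "google.golang.org/genproto/googleapis/longrunning"
)

func main() {
	// Build a stand-in long-running operation carrying OperationMetadata in
	// its metadata Any; in real use this would come from the Operations API.
	md := &genomics.OperationMetadata{ProjectId: "example-project"}
	packed, err := ptypes.MarshalAny(md)
	if err != nil {
		log.Fatal(err)
	}
	op := &lrpb.Operation{Name: "operations/example", Metadata: packed}

	// Unpack the Any back into the concrete OperationMetadata message.
	got := &genomics.OperationMetadata{}
	if err := ptypes.UnmarshalAny(op.GetMetadata(), got); err != nil {
		log.Fatal(err)
	}
	fmt.Println(got.GetProjectId(), len(got.GetEvents())) // example-project 0
}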
109
vendor/google.golang.org/genproto/googleapis/genomics/v1/position.pb.go
generated
vendored
Normal file
@@ -0,0 +1,109 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// source: google/genomics/v1/position.proto
|
||||
|
||||
package genomics
|
||||
|
||||
import (
|
||||
fmt "fmt"
|
||||
proto "github.com/golang/protobuf/proto"
|
||||
_ "google.golang.org/genproto/googleapis/api/annotations"
|
||||
math "math"
|
||||
)
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||
|
||||
// An abstraction for referring to a genomic position, in relation to some
|
||||
// already known reference. For now, represents a genomic position as a
|
||||
// reference name, a base number on that reference (0-based), and a
|
||||
// determination of forward or reverse strand.
|
||||
type Position struct {
|
||||
// The name of the reference in whatever reference set is being used.
|
||||
ReferenceName string `protobuf:"bytes,1,opt,name=reference_name,json=referenceName,proto3" json:"reference_name,omitempty"`
|
||||
// The 0-based offset from the start of the forward strand for that reference.
|
||||
Position int64 `protobuf:"varint,2,opt,name=position,proto3" json:"position,omitempty"`
|
||||
// Whether this position is on the reverse strand, as opposed to the forward
|
||||
// strand.
|
||||
ReverseStrand bool `protobuf:"varint,3,opt,name=reverse_strand,json=reverseStrand,proto3" json:"reverse_strand,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Position) Reset() { *m = Position{} }
|
||||
func (m *Position) String() string { return proto.CompactTextString(m) }
|
||||
func (*Position) ProtoMessage() {}
|
||||
func (*Position) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_8dcb4638f9813bab, []int{0}
|
||||
}
|
||||
|
||||
func (m *Position) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_Position.Unmarshal(m, b)
|
||||
}
|
||||
func (m *Position) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_Position.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *Position) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Position.Merge(m, src)
|
||||
}
|
||||
func (m *Position) XXX_Size() int {
|
||||
return xxx_messageInfo_Position.Size(m)
|
||||
}
|
||||
func (m *Position) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Position.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Position proto.InternalMessageInfo
|
||||
|
||||
func (m *Position) GetReferenceName() string {
|
||||
if m != nil {
|
||||
return m.ReferenceName
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Position) GetPosition() int64 {
|
||||
if m != nil {
|
||||
return m.Position
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *Position) GetReverseStrand() bool {
|
||||
if m != nil {
|
||||
return m.ReverseStrand
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func init() {
|
||||
proto.RegisterType((*Position)(nil), "google.genomics.v1.Position")
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("google/genomics/v1/position.proto", fileDescriptor_8dcb4638f9813bab) }
|
||||
|
||||
var fileDescriptor_8dcb4638f9813bab = []byte{
|
||||
// 223 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x90, 0x41, 0x4b, 0x03, 0x31,
|
||||
0x14, 0x84, 0x89, 0x05, 0x59, 0x03, 0xf5, 0xb0, 0x07, 0x59, 0x8a, 0x87, 0x55, 0x10, 0xf6, 0x94,
|
||||
0x50, 0xbc, 0xe9, 0xad, 0x3f, 0x40, 0x96, 0x7a, 0xf3, 0x52, 0x9e, 0xeb, 0x33, 0x06, 0xba, 0xef,
|
||||
0x85, 0x24, 0xec, 0x6f, 0xf7, 0x28, 0x49, 0x9a, 0x22, 0xf4, 0x96, 0x4c, 0x66, 0x26, 0x1f, 0x23,
|
||||
0x1f, 0x0c, 0xb3, 0x39, 0xa2, 0x36, 0x48, 0x3c, 0xdb, 0x29, 0xe8, 0x65, 0xab, 0x1d, 0x07, 0x1b,
|
||||
0x2d, 0x93, 0x72, 0x9e, 0x23, 0xb7, 0x6d, 0xb1, 0xa8, 0x6a, 0x51, 0xcb, 0x76, 0x73, 0x7f, 0x8a,
|
||||
0x81, 0xb3, 0x1a, 0x88, 0x38, 0x42, 0x0a, 0x84, 0x92, 0x78, 0x8c, 0xb2, 0x19, 0x4f, 0x1d, 0xed,
|
||||
0x93, 0xbc, 0xf5, 0xf8, 0x8d, 0x1e, 0x69, 0xc2, 0x03, 0xc1, 0x8c, 0x9d, 0xe8, 0xc5, 0x70, 0xb3,
|
||||
0x5f, 0x9f, 0xd5, 0x37, 0x98, 0xb1, 0xdd, 0xc8, 0xa6, 0x7e, 0xdb, 0x5d, 0xf5, 0x62, 0x58, 0xed,
|
||||
0xcf, 0xf7, 0x52, 0xb1, 0xa0, 0x0f, 0x78, 0x08, 0xd1, 0x03, 0x7d, 0x75, 0xab, 0x5e, 0x0c, 0x4d,
|
||||
0xaa, 0xc8, 0xea, 0x7b, 0x16, 0x77, 0x3f, 0xf2, 0x6e, 0xe2, 0x59, 0x5d, 0xd2, 0xee, 0xd6, 0x95,
|
||||
0x66, 0x4c, 0x78, 0xa3, 0xf8, 0x78, 0xa9, 0x26, 0x3e, 0x02, 0x19, 0xc5, 0xde, 0xa4, 0x01, 0x32,
|
||||
0xbc, 0x2e, 0x4f, 0xe0, 0x6c, 0xf8, 0x3f, 0xca, 0x6b, 0x3d, 0xff, 0x0a, 0xf1, 0x79, 0x9d, 0x9d,
|
||||
0xcf, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x5c, 0xc6, 0x22, 0xea, 0x3d, 0x01, 0x00, 0x00,
|
||||
}
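A small sketch, not part of the generated file, illustrating the 0-based convention documented on Position by rendering it as the 1-based, strand-annotated coordinate string most SAM-oriented tools display. The oneBased helper and its output format are made up for illustration.

package main

import (
	"fmt"

	genomics "google.golang.org/genproto/googleapis/genomics/v1"
)

// oneBased renders a 0-based Position as "<reference>:<1-based position>(<strand>)".
func oneBased(p *genomics.Position) string {
	strand := "+"
	if p.GetReverseStrand() {
		strand = "-"
	}
	// SAM and most genome browsers report 1-based coordinates, so add 1.
	return fmt.Sprintf("%s:%d(%s)", p.GetReferenceName(), p.GetPosition()+1, strand)
}

func main() {
	p := &genomics.Position{ReferenceName: "chr1", Position: 99, ReverseStrand: true}
	fmt.Println(oneBased(p)) // chr1:100(-)
}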
106
vendor/google.golang.org/genproto/googleapis/genomics/v1/range.pb.go
generated
vendored
Normal file
@@ -0,0 +1,106 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// source: google/genomics/v1/range.proto
|
||||
|
||||
package genomics
|
||||
|
||||
import (
|
||||
fmt "fmt"
|
||||
proto "github.com/golang/protobuf/proto"
|
||||
_ "google.golang.org/genproto/googleapis/api/annotations"
|
||||
math "math"
|
||||
)
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||
|
||||
// A 0-based half-open genomic coordinate range for search requests.
|
||||
type Range struct {
|
||||
// The reference sequence name, for example `chr1`,
|
||||
// `1`, or `chrX`.
|
||||
ReferenceName string `protobuf:"bytes,1,opt,name=reference_name,json=referenceName,proto3" json:"reference_name,omitempty"`
|
||||
// The start position of the range on the reference, 0-based inclusive.
|
||||
Start int64 `protobuf:"varint,2,opt,name=start,proto3" json:"start,omitempty"`
|
||||
// The end position of the range on the reference, 0-based exclusive.
|
||||
End int64 `protobuf:"varint,3,opt,name=end,proto3" json:"end,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Range) Reset() { *m = Range{} }
|
||||
func (m *Range) String() string { return proto.CompactTextString(m) }
|
||||
func (*Range) ProtoMessage() {}
|
||||
func (*Range) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_296c87a77538e516, []int{0}
|
||||
}
|
||||
|
||||
func (m *Range) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_Range.Unmarshal(m, b)
|
||||
}
|
||||
func (m *Range) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_Range.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *Range) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Range.Merge(m, src)
|
||||
}
|
||||
func (m *Range) XXX_Size() int {
|
||||
return xxx_messageInfo_Range.Size(m)
|
||||
}
|
||||
func (m *Range) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Range.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Range proto.InternalMessageInfo
|
||||
|
||||
func (m *Range) GetReferenceName() string {
|
||||
if m != nil {
|
||||
return m.ReferenceName
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Range) GetStart() int64 {
|
||||
if m != nil {
|
||||
return m.Start
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *Range) GetEnd() int64 {
|
||||
if m != nil {
|
||||
return m.End
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func init() {
|
||||
proto.RegisterType((*Range)(nil), "google.genomics.v1.Range")
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("google/genomics/v1/range.proto", fileDescriptor_296c87a77538e516) }
|
||||
|
||||
var fileDescriptor_296c87a77538e516 = []byte{
|
||||
// 209 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x4f, 0x4d, 0x4b, 0xc4, 0x30,
|
||||
0x10, 0x25, 0x96, 0x15, 0x0c, 0x28, 0x12, 0x44, 0x8a, 0x88, 0x2c, 0x82, 0xb0, 0xa7, 0x84, 0xe2,
|
||||
0x4d, 0x6f, 0xfd, 0x01, 0x52, 0x7a, 0xf0, 0xe0, 0x45, 0xc6, 0x3a, 0x86, 0x40, 0x33, 0x53, 0x92,
|
||||
0xd0, 0xdf, 0xee, 0x51, 0x92, 0x58, 0x11, 0xf6, 0x36, 0x79, 0x1f, 0x79, 0xef, 0xc9, 0x3b, 0xcb,
|
||||
0x6c, 0x67, 0x34, 0x16, 0x89, 0xbd, 0x9b, 0xa2, 0x59, 0x3b, 0x13, 0x80, 0x2c, 0xea, 0x25, 0x70,
|
||||
0x62, 0xa5, 0x2a, 0xaf, 0x37, 0x5e, 0xaf, 0xdd, 0xcd, 0xed, 0xaf, 0x07, 0x16, 0x67, 0x80, 0x88,
|
||||
0x13, 0x24, 0xc7, 0x14, 0xab, 0xe3, 0xfe, 0x55, 0xee, 0xc6, 0xfc, 0x81, 0x7a, 0x90, 0x17, 0x01,
|
||||
0xbf, 0x30, 0x20, 0x4d, 0xf8, 0x4e, 0xe0, 0xb1, 0x15, 0x7b, 0x71, 0x38, 0x1b, 0xcf, 0xff, 0xd0,
|
||||
0x17, 0xf0, 0xa8, 0xae, 0xe4, 0x2e, 0x26, 0x08, 0xa9, 0x3d, 0xd9, 0x8b, 0x43, 0x33, 0xd6, 0x87,
|
||||
0xba, 0x94, 0x0d, 0xd2, 0x67, 0xdb, 0x14, 0x2c, 0x9f, 0x3d, 0xca, 0xeb, 0x89, 0xbd, 0x3e, 0xee,
|
||||
0xd3, 0xcb, 0x92, 0x37, 0xe4, 0xf4, 0x41, 0xbc, 0x3d, 0x6d, 0x0a, 0x9e, 0x81, 0xac, 0xe6, 0x60,
|
||||
0xf3, 0xb8, 0xd2, 0xcd, 0x54, 0x0a, 0x16, 0x17, 0xff, 0x0f, 0x7e, 0xde, 0xee, 0x6f, 0x21, 0x3e,
|
||||
0x4e, 0x8b, 0xf2, 0xf1, 0x27, 0x00, 0x00, 0xff, 0xff, 0xb7, 0x3e, 0xf1, 0x62, 0x19, 0x01, 0x00,
|
||||
0x00,
|
||||
}
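A short sketch, not part of the generated file, of the arithmetic implied by the 0-based half-open convention documented on Range: the length is End minus Start, and two ranges on the same reference overlap exactly when each starts before the other ends. The length and overlaps helpers are illustrative, not part of any genomics API.

package main

import (
	"fmt"

	genomics "google.golang.org/genproto/googleapis/genomics/v1"
)

// length of a 0-based half-open range is simply end - start.
func length(r *genomics.Range) int64 {
	return r.GetEnd() - r.GetStart()
}

// overlaps reports whether two half-open ranges on the same reference share
// at least one base.
func overlaps(a, b *genomics.Range) bool {
	return a.GetReferenceName() == b.GetReferenceName() &&
		a.GetStart() < b.GetEnd() && b.GetStart() < a.GetEnd()
}

func main() {
	a := &genomics.Range{ReferenceName: "chr1", Start: 100, End: 200}
	b := &genomics.Range{ReferenceName: "chr1", Start: 199, End: 250}
	fmt.Println(length(a), overlaps(a, b)) // 100 true
}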
450
vendor/google.golang.org/genproto/googleapis/genomics/v1/readalignment.pb.go
generated
vendored
Normal file
@@ -0,0 +1,450 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// source: google/genomics/v1/readalignment.proto
|
||||
|
||||
package genomics
|
||||
|
||||
import (
|
||||
fmt "fmt"
|
||||
proto "github.com/golang/protobuf/proto"
|
||||
_struct "github.com/golang/protobuf/ptypes/struct"
|
||||
_ "google.golang.org/genproto/googleapis/api/annotations"
|
||||
math "math"
|
||||
)
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||
|
||||
// A linear alignment can be represented by one CIGAR string. Describes the
|
||||
// mapped position and local alignment of the read to the reference.
|
||||
type LinearAlignment struct {
|
||||
// The position of this alignment.
|
||||
Position *Position `protobuf:"bytes,1,opt,name=position,proto3" json:"position,omitempty"`
|
||||
// The mapping quality of this alignment. Represents how likely
|
||||
// the read maps to this position as opposed to other locations.
|
||||
//
|
||||
// Specifically, this is -10 log10 Pr(mapping position is wrong), rounded to
|
||||
// the nearest integer.
|
||||
MappingQuality int32 `protobuf:"varint,2,opt,name=mapping_quality,json=mappingQuality,proto3" json:"mapping_quality,omitempty"`
|
||||
// Represents the local alignment of this sequence (alignment matches, indels,
|
||||
// etc) against the reference.
|
||||
Cigar []*CigarUnit `protobuf:"bytes,3,rep,name=cigar,proto3" json:"cigar,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *LinearAlignment) Reset() { *m = LinearAlignment{} }
|
||||
func (m *LinearAlignment) String() string { return proto.CompactTextString(m) }
|
||||
func (*LinearAlignment) ProtoMessage() {}
|
||||
func (*LinearAlignment) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_5882c5c7543d07e3, []int{0}
|
||||
}
|
||||
|
||||
func (m *LinearAlignment) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_LinearAlignment.Unmarshal(m, b)
|
||||
}
|
||||
func (m *LinearAlignment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_LinearAlignment.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *LinearAlignment) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_LinearAlignment.Merge(m, src)
|
||||
}
|
||||
func (m *LinearAlignment) XXX_Size() int {
|
||||
return xxx_messageInfo_LinearAlignment.Size(m)
|
||||
}
|
||||
func (m *LinearAlignment) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_LinearAlignment.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_LinearAlignment proto.InternalMessageInfo
|
||||
|
||||
func (m *LinearAlignment) GetPosition() *Position {
|
||||
if m != nil {
|
||||
return m.Position
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *LinearAlignment) GetMappingQuality() int32 {
|
||||
if m != nil {
|
||||
return m.MappingQuality
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *LinearAlignment) GetCigar() []*CigarUnit {
|
||||
if m != nil {
|
||||
return m.Cigar
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// A read alignment describes a linear alignment of a string of DNA to a
|
||||
// [reference sequence][google.genomics.v1.Reference], in addition to metadata
|
||||
// about the fragment (the molecule of DNA sequenced) and the read (the bases
|
||||
// which were read by the sequencer). A read is equivalent to a line in a SAM
|
||||
// file. A read belongs to exactly one read group and exactly one
|
||||
// [read group set][google.genomics.v1.ReadGroupSet].
|
||||
//
|
||||
// For more genomics resource definitions, see [Fundamentals of Google
|
||||
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
|
||||
//
|
||||
// ### Reverse-stranded reads
|
||||
//
|
||||
// Mapped reads (reads having a non-null `alignment`) can be aligned to either
|
||||
// the forward or the reverse strand of their associated reference. Strandedness
|
||||
// of a mapped read is encoded by `alignment.position.reverseStrand`.
|
||||
//
|
||||
// If we consider the reference to be a forward-stranded coordinate space of
|
||||
// `[0, reference.length)` with `0` as the left-most position and
|
||||
// `reference.length` as the right-most position, reads are always aligned left
|
||||
// to right. That is, `alignment.position.position` always refers to the
|
||||
// left-most reference coordinate and `alignment.cigar` describes the alignment
|
||||
// of this read to the reference from left to right. All per-base fields such as
|
||||
// `alignedSequence` and `alignedQuality` share this same left-to-right
|
||||
// orientation; this is true of reads which are aligned to either strand. For
|
||||
// reverse-stranded reads, this means that `alignedSequence` is the reverse
|
||||
// complement of the bases that were originally reported by the sequencing
|
||||
// machine.
|
||||
//
|
||||
// ### Generating a reference-aligned sequence string
|
||||
//
|
||||
// When interacting with mapped reads, it's often useful to produce a string
|
||||
// representing the local alignment of the read to reference. The following
|
||||
// pseudocode demonstrates one way of doing this:
|
||||
//
|
||||
// out = ""
|
||||
// offset = 0
|
||||
// for c in read.alignment.cigar {
|
||||
// switch c.operation {
|
||||
// case "ALIGNMENT_MATCH", "SEQUENCE_MATCH", "SEQUENCE_MISMATCH":
|
||||
// out += read.alignedSequence[offset:offset+c.operationLength]
|
||||
// offset += c.operationLength
|
||||
// break
|
||||
// case "CLIP_SOFT", "INSERT":
|
||||
// offset += c.operationLength
|
||||
// break
|
||||
// case "PAD":
|
||||
// out += repeat("*", c.operationLength)
|
||||
// break
|
||||
// case "DELETE":
|
||||
// out += repeat("-", c.operationLength)
|
||||
// break
|
||||
// case "SKIP":
|
||||
// out += repeat(" ", c.operationLength)
|
||||
// break
|
||||
// case "CLIP_HARD":
|
||||
// break
|
||||
// }
|
||||
// }
|
||||
// return out
|
||||
//
|
||||
// ### Converting to SAM's CIGAR string
|
||||
//
|
||||
// The following pseudocode generates a SAM CIGAR string from the
|
||||
// `cigar` field. Note that this is a lossy conversion
|
||||
// (`cigar.referenceSequence` is lost).
|
||||
//
|
||||
// cigarMap = {
|
||||
// "ALIGNMENT_MATCH": "M",
|
||||
// "INSERT": "I",
|
||||
// "DELETE": "D",
|
||||
// "SKIP": "N",
|
||||
// "CLIP_SOFT": "S",
|
||||
// "CLIP_HARD": "H",
|
||||
// "PAD": "P",
|
||||
// "SEQUENCE_MATCH": "=",
|
||||
// "SEQUENCE_MISMATCH": "X",
|
||||
// }
|
||||
// cigarStr = ""
|
||||
// for c in read.alignment.cigar {
|
||||
// cigarStr += c.operationLength + cigarMap[c.operation]
|
||||
// }
|
||||
// return cigarStr
|
||||
type Read struct {
|
||||
// The server-generated read ID, unique across all reads. This is different
|
||||
// from the `fragmentName`.
|
||||
Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
|
||||
// The ID of the read group this read belongs to. A read belongs to exactly
|
||||
// one read group. This is a server-generated ID which is distinct from SAM's
|
||||
// RG tag (for that value, see
|
||||
// [ReadGroup.name][google.genomics.v1.ReadGroup.name]).
|
||||
ReadGroupId string `protobuf:"bytes,2,opt,name=read_group_id,json=readGroupId,proto3" json:"read_group_id,omitempty"`
|
||||
// The ID of the read group set this read belongs to. A read belongs to
|
||||
// exactly one read group set.
|
||||
ReadGroupSetId string `protobuf:"bytes,3,opt,name=read_group_set_id,json=readGroupSetId,proto3" json:"read_group_set_id,omitempty"`
|
||||
// The fragment name. Equivalent to QNAME (query template name) in SAM.
|
||||
FragmentName string `protobuf:"bytes,4,opt,name=fragment_name,json=fragmentName,proto3" json:"fragment_name,omitempty"`
|
||||
// The orientation and the distance between reads from the fragment are
|
||||
// consistent with the sequencing protocol (SAM flag 0x2).
|
||||
ProperPlacement bool `protobuf:"varint,5,opt,name=proper_placement,json=properPlacement,proto3" json:"proper_placement,omitempty"`
|
||||
// The fragment is a PCR or optical duplicate (SAM flag 0x400).
|
||||
DuplicateFragment bool `protobuf:"varint,6,opt,name=duplicate_fragment,json=duplicateFragment,proto3" json:"duplicate_fragment,omitempty"`
|
||||
// The observed length of the fragment, equivalent to TLEN in SAM.
|
||||
FragmentLength int32 `protobuf:"varint,7,opt,name=fragment_length,json=fragmentLength,proto3" json:"fragment_length,omitempty"`
|
||||
// The read number in sequencing. 0-based and less than numberReads. This
|
||||
// field replaces SAM flag 0x40 and 0x80.
|
||||
ReadNumber int32 `protobuf:"varint,8,opt,name=read_number,json=readNumber,proto3" json:"read_number,omitempty"`
|
||||
// The number of reads in the fragment (extension to SAM flag 0x1).
|
||||
NumberReads int32 `protobuf:"varint,9,opt,name=number_reads,json=numberReads,proto3" json:"number_reads,omitempty"`
|
||||
// Whether this read did not pass filters, such as platform or vendor quality
|
||||
// controls (SAM flag 0x200).
|
||||
FailedVendorQualityChecks bool `protobuf:"varint,10,opt,name=failed_vendor_quality_checks,json=failedVendorQualityChecks,proto3" json:"failed_vendor_quality_checks,omitempty"`
|
||||
// The linear alignment for this alignment record. This field is null for
|
||||
// unmapped reads.
|
||||
Alignment *LinearAlignment `protobuf:"bytes,11,opt,name=alignment,proto3" json:"alignment,omitempty"`
|
||||
// Whether this alignment is secondary. Equivalent to SAM flag 0x100.
|
||||
// A secondary alignment represents an alternative to the primary alignment
|
||||
// for this read. Aligners may return secondary alignments if a read can map
|
||||
// ambiguously to multiple coordinates in the genome. By convention, each read
|
||||
// has one and only one alignment where both `secondaryAlignment`
|
||||
// and `supplementaryAlignment` are false.
|
||||
SecondaryAlignment bool `protobuf:"varint,12,opt,name=secondary_alignment,json=secondaryAlignment,proto3" json:"secondary_alignment,omitempty"`
|
||||
// Whether this alignment is supplementary. Equivalent to SAM flag 0x800.
|
||||
// Supplementary alignments are used in the representation of a chimeric
|
||||
// alignment. In a chimeric alignment, a read is split into multiple
|
||||
// linear alignments that map to different reference contigs. The first
|
||||
// linear alignment in the read will be designated as the representative
|
||||
// alignment; the remaining linear alignments will be designated as
|
||||
// supplementary alignments. These alignments may have different mapping
|
||||
// quality scores. In each linear alignment in a chimeric alignment, the read
|
||||
// will be hard clipped. The `alignedSequence` and
|
||||
// `alignedQuality` fields in the alignment record will only
|
||||
// represent the bases for its respective linear alignment.
|
||||
SupplementaryAlignment bool `protobuf:"varint,13,opt,name=supplementary_alignment,json=supplementaryAlignment,proto3" json:"supplementary_alignment,omitempty"`
|
||||
// The bases of the read sequence contained in this alignment record,
|
||||
// **without CIGAR operations applied** (equivalent to SEQ in SAM).
|
||||
// `alignedSequence` and `alignedQuality` may be
|
||||
// shorter than the full read sequence and quality. This will occur if the
|
||||
// alignment is part of a chimeric alignment, or if the read was trimmed. When
|
||||
// this occurs, the CIGAR for this read will begin/end with a hard clip
|
||||
// operator that will indicate the length of the excised sequence.
|
||||
AlignedSequence string `protobuf:"bytes,14,opt,name=aligned_sequence,json=alignedSequence,proto3" json:"aligned_sequence,omitempty"`
|
||||
// The quality of the read sequence contained in this alignment record
|
||||
// (equivalent to QUAL in SAM).
|
||||
// `alignedSequence` and `alignedQuality` may be shorter than the full read
|
||||
// sequence and quality. This will occur if the alignment is part of a
|
||||
// chimeric alignment, or if the read was trimmed. When this occurs, the CIGAR
|
||||
// for this read will begin/end with a hard clip operator that will indicate
|
||||
// the length of the excised sequence.
|
||||
AlignedQuality []int32 `protobuf:"varint,15,rep,packed,name=aligned_quality,json=alignedQuality,proto3" json:"aligned_quality,omitempty"`
|
||||
// The mapping of the primary alignment of the
|
||||
// `(readNumber+1)%numberReads` read in the fragment. It replaces
|
||||
// mate position and mate strand in SAM.
|
||||
NextMatePosition *Position `protobuf:"bytes,16,opt,name=next_mate_position,json=nextMatePosition,proto3" json:"next_mate_position,omitempty"`
|
||||
// A map of additional read alignment information. This must be of the form
|
||||
// map<string, string[]> (string key mapping to a list of string values).
|
||||
Info map[string]*_struct.ListValue `protobuf:"bytes,17,rep,name=info,proto3" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Read) Reset() { *m = Read{} }
|
||||
func (m *Read) String() string { return proto.CompactTextString(m) }
|
||||
func (*Read) ProtoMessage() {}
|
||||
func (*Read) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_5882c5c7543d07e3, []int{1}
|
||||
}
|
||||
|
||||
func (m *Read) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_Read.Unmarshal(m, b)
|
||||
}
|
||||
func (m *Read) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_Read.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *Read) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Read.Merge(m, src)
|
||||
}
|
||||
func (m *Read) XXX_Size() int {
|
||||
return xxx_messageInfo_Read.Size(m)
|
||||
}
|
||||
func (m *Read) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Read.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Read proto.InternalMessageInfo
|
||||
|
||||
func (m *Read) GetId() string {
|
||||
if m != nil {
|
||||
return m.Id
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Read) GetReadGroupId() string {
|
||||
if m != nil {
|
||||
return m.ReadGroupId
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Read) GetReadGroupSetId() string {
|
||||
if m != nil {
|
||||
return m.ReadGroupSetId
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Read) GetFragmentName() string {
|
||||
if m != nil {
|
||||
return m.FragmentName
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Read) GetProperPlacement() bool {
|
||||
if m != nil {
|
||||
return m.ProperPlacement
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (m *Read) GetDuplicateFragment() bool {
|
||||
if m != nil {
|
||||
return m.DuplicateFragment
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (m *Read) GetFragmentLength() int32 {
|
||||
if m != nil {
|
||||
return m.FragmentLength
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *Read) GetReadNumber() int32 {
|
||||
if m != nil {
|
||||
return m.ReadNumber
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *Read) GetNumberReads() int32 {
|
||||
if m != nil {
|
||||
return m.NumberReads
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *Read) GetFailedVendorQualityChecks() bool {
|
||||
if m != nil {
|
||||
return m.FailedVendorQualityChecks
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (m *Read) GetAlignment() *LinearAlignment {
|
||||
if m != nil {
|
||||
return m.Alignment
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *Read) GetSecondaryAlignment() bool {
|
||||
if m != nil {
|
||||
return m.SecondaryAlignment
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (m *Read) GetSupplementaryAlignment() bool {
|
||||
if m != nil {
|
||||
return m.SupplementaryAlignment
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (m *Read) GetAlignedSequence() string {
|
||||
if m != nil {
|
||||
return m.AlignedSequence
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Read) GetAlignedQuality() []int32 {
|
||||
if m != nil {
|
||||
return m.AlignedQuality
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *Read) GetNextMatePosition() *Position {
|
||||
if m != nil {
|
||||
return m.NextMatePosition
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *Read) GetInfo() map[string]*_struct.ListValue {
|
||||
if m != nil {
|
||||
return m.Info
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
proto.RegisterType((*LinearAlignment)(nil), "google.genomics.v1.LinearAlignment")
|
||||
proto.RegisterType((*Read)(nil), "google.genomics.v1.Read")
|
||||
proto.RegisterMapType((map[string]*_struct.ListValue)(nil), "google.genomics.v1.Read.InfoEntry")
|
||||
}
|
||||
|
||||
func init() {
|
||||
proto.RegisterFile("google/genomics/v1/readalignment.proto", fileDescriptor_5882c5c7543d07e3)
|
||||
}
|
||||
|
||||
var fileDescriptor_5882c5c7543d07e3 = []byte{
|
||||
// 683 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x94, 0xcd, 0x4e, 0xdb, 0x4a,
|
||||
0x14, 0xc7, 0xe5, 0x84, 0x70, 0xc9, 0x09, 0x24, 0x61, 0xae, 0xc4, 0xf5, 0x8d, 0xb8, 0xb7, 0x21,
|
||||
0x48, 0x6d, 0x58, 0xd4, 0x2e, 0x20, 0xb5, 0x88, 0x2e, 0x2a, 0x40, 0x6d, 0x45, 0x45, 0x51, 0x6a,
|
||||
0x54, 0x16, 0xdd, 0x58, 0x83, 0x7d, 0x62, 0x46, 0xd8, 0x33, 0xc6, 0x1e, 0x47, 0xcd, 0x23, 0xf5,
|
||||
0xdd, 0xfa, 0x00, 0x5d, 0x56, 0x33, 0xf6, 0x38, 0xd0, 0x66, 0xd1, 0x5d, 0xf2, 0x3f, 0xbf, 0xf3,
|
||||
0xe1, 0xf3, 0x31, 0xf0, 0x34, 0x12, 0x22, 0x8a, 0xd1, 0x8d, 0x90, 0x8b, 0x84, 0x05, 0xb9, 0x3b,
|
||||
0xdb, 0x77, 0x33, 0xa4, 0x21, 0x8d, 0x59, 0xc4, 0x13, 0xe4, 0xd2, 0x49, 0x33, 0x21, 0x05, 0x21,
|
||||
0x25, 0xe7, 0x18, 0xce, 0x99, 0xed, 0x0f, 0xb6, 0x2b, 0x5f, 0x9a, 0x32, 0x97, 0x72, 0x2e, 0x24,
|
||||
0x95, 0x4c, 0xf0, 0xbc, 0xf4, 0x18, 0xfc, 0xbf, 0x24, 0x72, 0xc0, 0x22, 0x9a, 0x55, 0xf6, 0x9d,
|
||||
0x25, 0xf6, 0x54, 0xe4, 0x4c, 0xc5, 0xa8, 0x10, 0x93, 0x40, 0xff, 0xbb, 0x29, 0xa6, 0x6e, 0x2e,
|
||||
0xb3, 0x22, 0xa8, 0x4a, 0x1a, 0x7d, 0xb3, 0xa0, 0x77, 0xc1, 0x38, 0xd2, 0xec, 0xc4, 0x14, 0x4b,
|
||||
0x8e, 0x60, 0xcd, 0xc4, 0xb0, 0xad, 0xa1, 0x35, 0xee, 0x1c, 0x6c, 0x3b, 0xbf, 0x57, 0xee, 0x4c,
|
||||
0x2a, 0xc6, 0xab, 0x69, 0xf2, 0x0c, 0x7a, 0x09, 0x4d, 0x53, 0xc6, 0x23, 0xff, 0xbe, 0xa0, 0x31,
|
||||
0x93, 0x73, 0xbb, 0x31, 0xb4, 0xc6, 0x2d, 0xaf, 0x5b, 0xc9, 0x9f, 0x4a, 0x95, 0x1c, 0x42, 0x4b,
|
||||
0x7f, 0x86, 0xdd, 0x1c, 0x36, 0xc7, 0x9d, 0x83, 0xff, 0x96, 0xc5, 0x3f, 0x53, 0xc0, 0x67, 0xce,
|
||||
0xa4, 0x57, 0xb2, 0xa3, 0xef, 0xab, 0xb0, 0xe2, 0x21, 0x0d, 0x49, 0x17, 0x1a, 0x2c, 0xd4, 0xa5,
|
||||
0xb5, 0xbd, 0x06, 0x0b, 0xc9, 0x08, 0x36, 0x54, 0xbb, 0xfd, 0x28, 0x13, 0x45, 0xea, 0xb3, 0x50,
|
||||
0x27, 0x6d, 0x7b, 0x1d, 0x25, 0xbe, 0x57, 0xda, 0x79, 0x48, 0xf6, 0x60, 0xf3, 0x01, 0x93, 0xa3,
|
||||
0x54, 0x5c, 0x53, 0x73, 0xdd, 0x9a, 0xbb, 0x42, 0x79, 0x1e, 0x92, 0x5d, 0xd8, 0x98, 0x66, 0x34,
|
||||
0x52, 0xbd, 0xf0, 0x39, 0x4d, 0xd0, 0x5e, 0xd1, 0xd8, 0xba, 0x11, 0x2f, 0x69, 0x82, 0x64, 0x0f,
|
||||
0xfa, 0x69, 0x26, 0x52, 0xcc, 0xfc, 0x34, 0xa6, 0x01, 0x2a, 0xdd, 0x6e, 0x0d, 0xad, 0xf1, 0x9a,
|
||||
0xd7, 0x2b, 0xf5, 0x89, 0x91, 0xc9, 0x73, 0x20, 0x61, 0x91, 0xc6, 0x2c, 0xa0, 0x12, 0x7d, 0x13,
|
||||
0xc4, 0x5e, 0xd5, 0xf0, 0x66, 0x6d, 0x79, 0x57, 0x19, 0x54, 0x13, 0xeb, 0xf4, 0x31, 0xf2, 0x48,
|
||||
0xde, 0xda, 0x7f, 0x95, 0x4d, 0x34, 0xf2, 0x85, 0x56, 0xc9, 0x13, 0xd0, 0x5f, 0xe8, 0xf3, 0x22,
|
||||
0xb9, 0xc1, 0xcc, 0x5e, 0xd3, 0x10, 0x28, 0xe9, 0x52, 0x2b, 0x64, 0x07, 0xd6, 0x4b, 0x9b, 0xaf,
|
||||
0xc4, 0xdc, 0x6e, 0x6b, 0xa2, 0x53, 0x6a, 0xaa, 0x93, 0x39, 0x79, 0x03, 0xdb, 0x53, 0xca, 0x62,
|
||||
0x0c, 0xfd, 0x19, 0xf2, 0x50, 0x64, 0x66, 0x6e, 0x7e, 0x70, 0x8b, 0xc1, 0x5d, 0x6e, 0x83, 0xae,
|
||||
0xf2, 0xdf, 0x92, 0xb9, 0xd6, 0x48, 0x35, 0xc3, 0x33, 0x0d, 0x90, 0x13, 0x68, 0xd7, 0x6b, 0x6e,
|
||||
0x77, 0xf4, 0xb6, 0xec, 0x2e, 0x9b, 0xe6, 0x2f, 0x4b, 0xe6, 0x2d, 0xbc, 0x88, 0x0b, 0x7f, 0xe7,
|
||||
0x18, 0x08, 0x1e, 0xd2, 0x6c, 0xee, 0x2f, 0x82, 0xad, 0xeb, 0xd4, 0xa4, 0x36, 0x2d, 0x16, 0xf4,
|
||||
0x15, 0xfc, 0x93, 0x17, 0x69, 0x1a, 0xeb, 0xf6, 0x3e, 0x76, 0xda, 0xd0, 0x4e, 0x5b, 0x8f, 0xcc,
|
||||
0x0b, 0xc7, 0x3d, 0xe8, 0x6b, 0x14, 0x43, 0x3f, 0xc7, 0xfb, 0x02, 0x79, 0x80, 0x76, 0x57, 0x0f,
|
||||
0xb7, 0x57, 0xe9, 0x57, 0x95, 0xac, 0xa6, 0x60, 0x50, 0xb3, 0xca, 0xbd, 0x61, 0x53, 0x4d, 0xa1,
|
||||
0x92, 0xcd, 0x2a, 0x7f, 0x00, 0xc2, 0xf1, 0xab, 0xf4, 0x13, 0x35, 0xdd, 0xfa, 0x6e, 0xfa, 0x7f,
|
||||
0x70, 0x37, 0x7d, 0xe5, 0xf7, 0x91, 0x4a, 0x34, 0x0a, 0x79, 0x09, 0x2b, 0x8c, 0x4f, 0x85, 0xbd,
|
||||
0xa9, 0xaf, 0x62, 0xb4, 0xcc, 0x5b, 0x8d, 0xcd, 0x39, 0xe7, 0x53, 0xf1, 0x96, 0xcb, 0x6c, 0xee,
|
||||
0x69, 0x7e, 0x70, 0x05, 0xed, 0x5a, 0x22, 0x7d, 0x68, 0xde, 0xe1, 0xbc, 0x3a, 0x0f, 0xf5, 0x93,
|
||||
0xbc, 0x80, 0xd6, 0x8c, 0xc6, 0x05, 0xea, 0xbb, 0xe8, 0x1c, 0x0c, 0x4c, 0x5c, 0xf3, 0x24, 0x38,
|
||||
0x17, 0x2c, 0x97, 0xd7, 0x8a, 0xf0, 0x4a, 0xf0, 0xb8, 0x71, 0x64, 0x9d, 0x26, 0xb0, 0x15, 0x88,
|
||||
0x64, 0x49, 0x0d, 0xa7, 0x44, 0x15, 0x51, 0x77, 0x75, 0xa2, 0xa2, 0x4c, 0xac, 0x2f, 0xc7, 0x86,
|
||||
0x14, 0x31, 0xe5, 0x91, 0x23, 0xb2, 0x48, 0x3d, 0x4b, 0x3a, 0x87, 0x5b, 0x9a, 0x68, 0xca, 0xf2,
|
||||
0x87, 0x4f, 0xd5, 0x6b, 0xf3, 0xfb, 0x87, 0x65, 0xdd, 0xac, 0x6a, 0xf2, 0xf0, 0x67, 0x00, 0x00,
|
||||
0x00, 0xff, 0xff, 0xd0, 0xe1, 0xf6, 0x57, 0x4d, 0x05, 0x00, 0x00,
|
||||
}
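A minimal Go rendering, not part of the generated file, of the two pseudocode fragments in the Read doc comment above (building a reference-aligned sequence and converting to a SAM CIGAR string). It assumes the CigarUnit accessors generated in cigar.pb.go, including the operation constants named in the doc comment's cigarMap; it is a sketch of the documented conversion, not an official helper.

package main

import (
	"fmt"
	"strconv"
	"strings"

	genomics "google.golang.org/genproto/googleapis/genomics/v1"
)

// cigarToSAM follows the "Converting to SAM's CIGAR string" pseudocode:
// concatenate operationLength plus the one-letter SAM code for each
// CigarUnit. referenceSequence is dropped, so the conversion is lossy.
func cigarToSAM(cigar []*genomics.CigarUnit) string {
	codes := map[genomics.CigarUnit_Operation]string{
		genomics.CigarUnit_ALIGNMENT_MATCH:   "M",
		genomics.CigarUnit_INSERT:            "I",
		genomics.CigarUnit_DELETE:            "D",
		genomics.CigarUnit_SKIP:              "N",
		genomics.CigarUnit_CLIP_SOFT:         "S",
		genomics.CigarUnit_CLIP_HARD:         "H",
		genomics.CigarUnit_PAD:               "P",
		genomics.CigarUnit_SEQUENCE_MATCH:    "=",
		genomics.CigarUnit_SEQUENCE_MISMATCH: "X",
	}
	var b strings.Builder
	for _, c := range cigar {
		b.WriteString(strconv.FormatInt(c.GetOperationLength(), 10))
		b.WriteString(codes[c.GetOperation()])
	}
	return b.String()
}

// referenceAligned follows the "Generating a reference-aligned sequence
// string" pseudocode: walk the CIGAR, copying, padding or skipping bases of
// alignedSequence so the output lines up with the reference.
func referenceAligned(r *genomics.Read) string {
	var out strings.Builder
	offset := int64(0)
	seq := r.GetAlignedSequence()
	for _, c := range r.GetAlignment().GetCigar() {
		n := c.GetOperationLength()
		switch c.GetOperation() {
		case genomics.CigarUnit_ALIGNMENT_MATCH, genomics.CigarUnit_SEQUENCE_MATCH, genomics.CigarUnit_SEQUENCE_MISMATCH:
			out.WriteString(seq[offset : offset+n])
			offset += n
		case genomics.CigarUnit_CLIP_SOFT, genomics.CigarUnit_INSERT:
			offset += n
		case genomics.CigarUnit_PAD:
			out.WriteString(strings.Repeat("*", int(n)))
		case genomics.CigarUnit_DELETE:
			out.WriteString(strings.Repeat("-", int(n)))
		case genomics.CigarUnit_SKIP:
			out.WriteString(strings.Repeat(" ", int(n)))
		case genomics.CigarUnit_CLIP_HARD:
			// Hard-clipped bases are not present in alignedSequence.
		}
	}
	return out.String()
}

func main() {
	read := &genomics.Read{
		AlignedSequence: "ACGTACGT",
		Alignment: &genomics.LinearAlignment{
			Cigar: []*genomics.CigarUnit{
				{Operation: genomics.CigarUnit_CLIP_SOFT, OperationLength: 2},
				{Operation: genomics.CigarUnit_ALIGNMENT_MATCH, OperationLength: 4},
				{Operation: genomics.CigarUnit_DELETE, OperationLength: 3},
				{Operation: genomics.CigarUnit_ALIGNMENT_MATCH, OperationLength: 2},
			},
		},
	}
	fmt.Println(cigarToSAM(read.GetAlignment().GetCigar())) // 2S4M3D2M
	fmt.Println(referenceAligned(read))                     // GTAC---GT
}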
353
vendor/google.golang.org/genproto/googleapis/genomics/v1/readgroup.pb.go
generated
vendored
Normal file
@@ -0,0 +1,353 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// source: google/genomics/v1/readgroup.proto
|
||||
|
||||
package genomics
|
||||
|
||||
import (
|
||||
fmt "fmt"
|
||||
proto "github.com/golang/protobuf/proto"
|
||||
_struct "github.com/golang/protobuf/ptypes/struct"
|
||||
_ "google.golang.org/genproto/googleapis/api/annotations"
|
||||
math "math"
|
||||
)
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||
|
||||
// A read group is all the data that's processed the same way by the sequencer.
|
||||
type ReadGroup struct {
|
||||
// The server-generated read group ID, unique for all read groups.
|
||||
// Note: This is different than the @RG ID field in the SAM spec. For that
|
||||
// value, see [name][google.genomics.v1.ReadGroup.name].
|
||||
Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
|
||||
// The dataset to which this read group belongs.
|
||||
DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"`
|
||||
// The read group name. This corresponds to the @RG ID field in the SAM spec.
|
||||
Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
|
||||
// A free-form text description of this read group.
|
||||
Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"`
|
||||
// A client-supplied sample identifier for the reads in this read group.
|
||||
SampleId string `protobuf:"bytes,5,opt,name=sample_id,json=sampleId,proto3" json:"sample_id,omitempty"`
|
||||
// The experiment used to generate this read group.
|
||||
Experiment *ReadGroup_Experiment `protobuf:"bytes,6,opt,name=experiment,proto3" json:"experiment,omitempty"`
|
||||
// The predicted insert size of this read group. The insert size is the length
|
||||
// the sequenced DNA fragment from end-to-end, not including the adapters.
|
||||
PredictedInsertSize int32 `protobuf:"varint,7,opt,name=predicted_insert_size,json=predictedInsertSize,proto3" json:"predicted_insert_size,omitempty"`
|
||||
// The programs used to generate this read group. Programs are always
|
||||
// identical for all read groups within a read group set. For this reason,
|
||||
// only the first read group in a returned set will have this field
|
||||
// populated.
|
||||
Programs []*ReadGroup_Program `protobuf:"bytes,10,rep,name=programs,proto3" json:"programs,omitempty"`
|
||||
// The reference set the reads in this read group are aligned to.
|
||||
ReferenceSetId string `protobuf:"bytes,11,opt,name=reference_set_id,json=referenceSetId,proto3" json:"reference_set_id,omitempty"`
|
||||
// A map of additional read group information. This must be of the form
|
||||
// map<string, string[]> (string key mapping to a list of string values).
|
||||
Info map[string]*_struct.ListValue `protobuf:"bytes,12,rep,name=info,proto3" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *ReadGroup) Reset() { *m = ReadGroup{} }
|
||||
func (m *ReadGroup) String() string { return proto.CompactTextString(m) }
|
||||
func (*ReadGroup) ProtoMessage() {}
|
||||
func (*ReadGroup) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_78f10e430d3182d6, []int{0}
|
||||
}
|
||||
|
||||
func (m *ReadGroup) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ReadGroup.Unmarshal(m, b)
|
||||
}
|
||||
func (m *ReadGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_ReadGroup.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *ReadGroup) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_ReadGroup.Merge(m, src)
|
||||
}
|
||||
func (m *ReadGroup) XXX_Size() int {
|
||||
return xxx_messageInfo_ReadGroup.Size(m)
|
||||
}
|
||||
func (m *ReadGroup) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_ReadGroup.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_ReadGroup proto.InternalMessageInfo
|
||||
|
||||
func (m *ReadGroup) GetId() string {
|
||||
if m != nil {
|
||||
return m.Id
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *ReadGroup) GetDatasetId() string {
|
||||
if m != nil {
|
||||
return m.DatasetId
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *ReadGroup) GetName() string {
|
||||
if m != nil {
|
||||
return m.Name
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *ReadGroup) GetDescription() string {
|
||||
if m != nil {
|
||||
return m.Description
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *ReadGroup) GetSampleId() string {
|
||||
if m != nil {
|
||||
return m.SampleId
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *ReadGroup) GetExperiment() *ReadGroup_Experiment {
|
||||
if m != nil {
|
||||
return m.Experiment
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *ReadGroup) GetPredictedInsertSize() int32 {
|
||||
if m != nil {
|
||||
return m.PredictedInsertSize
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *ReadGroup) GetPrograms() []*ReadGroup_Program {
|
||||
if m != nil {
|
||||
return m.Programs
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *ReadGroup) GetReferenceSetId() string {
|
||||
if m != nil {
|
||||
return m.ReferenceSetId
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *ReadGroup) GetInfo() map[string]*_struct.ListValue {
|
||||
if m != nil {
|
||||
return m.Info
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type ReadGroup_Experiment struct {
|
||||
// A client-supplied library identifier; a library is a collection of DNA
|
||||
// fragments which have been prepared for sequencing from a sample. This
|
||||
// field is important for quality control as error or bias can be introduced
|
||||
// during sample preparation.
|
||||
LibraryId string `protobuf:"bytes,1,opt,name=library_id,json=libraryId,proto3" json:"library_id,omitempty"`
|
||||
// The platform unit used as part of this experiment, for example
|
||||
// flowcell-barcode.lane for Illumina or slide for SOLiD. Corresponds to the
|
||||
// @RG PU field in the SAM spec.
|
||||
PlatformUnit string `protobuf:"bytes,2,opt,name=platform_unit,json=platformUnit,proto3" json:"platform_unit,omitempty"`
|
||||
// The sequencing center used as part of this experiment.
|
||||
SequencingCenter string `protobuf:"bytes,3,opt,name=sequencing_center,json=sequencingCenter,proto3" json:"sequencing_center,omitempty"`
|
||||
// The instrument model used as part of this experiment. This maps to
|
||||
// sequencing technology in the SAM spec.
|
||||
InstrumentModel string `protobuf:"bytes,4,opt,name=instrument_model,json=instrumentModel,proto3" json:"instrument_model,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *ReadGroup_Experiment) Reset() { *m = ReadGroup_Experiment{} }
|
||||
func (m *ReadGroup_Experiment) String() string { return proto.CompactTextString(m) }
|
||||
func (*ReadGroup_Experiment) ProtoMessage() {}
|
||||
func (*ReadGroup_Experiment) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_78f10e430d3182d6, []int{0, 0}
|
||||
}
|
||||
|
||||
func (m *ReadGroup_Experiment) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ReadGroup_Experiment.Unmarshal(m, b)
|
||||
}
|
||||
func (m *ReadGroup_Experiment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_ReadGroup_Experiment.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *ReadGroup_Experiment) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_ReadGroup_Experiment.Merge(m, src)
|
||||
}
|
||||
func (m *ReadGroup_Experiment) XXX_Size() int {
|
||||
return xxx_messageInfo_ReadGroup_Experiment.Size(m)
|
||||
}
|
||||
func (m *ReadGroup_Experiment) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_ReadGroup_Experiment.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_ReadGroup_Experiment proto.InternalMessageInfo
|
||||
|
||||
func (m *ReadGroup_Experiment) GetLibraryId() string {
|
||||
if m != nil {
|
||||
return m.LibraryId
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *ReadGroup_Experiment) GetPlatformUnit() string {
|
||||
if m != nil {
|
||||
return m.PlatformUnit
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *ReadGroup_Experiment) GetSequencingCenter() string {
|
||||
if m != nil {
|
||||
return m.SequencingCenter
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
func (m *ReadGroup_Experiment) GetInstrumentModel() string {
	if m != nil {
		return m.InstrumentModel
	}
	return ""
}

type ReadGroup_Program struct {
	// The command line used to run this program.
	CommandLine string `protobuf:"bytes,1,opt,name=command_line,json=commandLine,proto3" json:"command_line,omitempty"`
	// The user specified locally unique ID of the program. Used along with
	// `prevProgramId` to define an ordering between programs.
	Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"`
	// The display name of the program. This is typically the colloquial name of
	// the tool used, for example 'bwa' or 'picard'.
	Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"`
	// The ID of the program run before this one.
	PrevProgramId string `protobuf:"bytes,4,opt,name=prev_program_id,json=prevProgramId,proto3" json:"prev_program_id,omitempty"`
	// The version of the program run.
	Version              string   `protobuf:"bytes,5,opt,name=version,proto3" json:"version,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

func (m *ReadGroup_Program) Reset()         { *m = ReadGroup_Program{} }
func (m *ReadGroup_Program) String() string { return proto.CompactTextString(m) }
func (*ReadGroup_Program) ProtoMessage()    {}
func (*ReadGroup_Program) Descriptor() ([]byte, []int) {
	return fileDescriptor_78f10e430d3182d6, []int{0, 1}
}

func (m *ReadGroup_Program) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ReadGroup_Program.Unmarshal(m, b)
}
func (m *ReadGroup_Program) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ReadGroup_Program.Marshal(b, m, deterministic)
}
func (m *ReadGroup_Program) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ReadGroup_Program.Merge(m, src)
}
func (m *ReadGroup_Program) XXX_Size() int {
	return xxx_messageInfo_ReadGroup_Program.Size(m)
}
func (m *ReadGroup_Program) XXX_DiscardUnknown() {
	xxx_messageInfo_ReadGroup_Program.DiscardUnknown(m)
}

var xxx_messageInfo_ReadGroup_Program proto.InternalMessageInfo

func (m *ReadGroup_Program) GetCommandLine() string {
	if m != nil {
		return m.CommandLine
	}
	return ""
}

func (m *ReadGroup_Program) GetId() string {
	if m != nil {
		return m.Id
	}
	return ""
}

func (m *ReadGroup_Program) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}

func (m *ReadGroup_Program) GetPrevProgramId() string {
	if m != nil {
		return m.PrevProgramId
	}
	return ""
}

func (m *ReadGroup_Program) GetVersion() string {
	if m != nil {
		return m.Version
	}
	return ""
}

func init() {
	proto.RegisterType((*ReadGroup)(nil), "google.genomics.v1.ReadGroup")
	proto.RegisterMapType((map[string]*_struct.ListValue)(nil), "google.genomics.v1.ReadGroup.InfoEntry")
	proto.RegisterType((*ReadGroup_Experiment)(nil), "google.genomics.v1.ReadGroup.Experiment")
	proto.RegisterType((*ReadGroup_Program)(nil), "google.genomics.v1.ReadGroup.Program")
}

func init() { proto.RegisterFile("google/genomics/v1/readgroup.proto", fileDescriptor_78f10e430d3182d6) }

var fileDescriptor_78f10e430d3182d6 = []byte{
	// 585 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x54, 0xcb, 0x6e, 0xd4, 0x30,
	0x14, 0x55, 0xa6, 0xcf, 0xb9, 0xd3, 0xc7, 0x60, 0x04, 0x8a, 0x06, 0x90, 0x86, 0x22, 0x60, 0x10,
	0x52, 0x42, 0x87, 0x0d, 0x6a, 0x57, 0x14, 0x55, 0x10, 0xa9, 0x48, 0x55, 0x2a, 0x58, 0xb0, 0x89,
	0xdc, 0xf8, 0x4e, 0x64, 0x91, 0xd8, 0xc1, 0x76, 0x46, 0xb4, 0x9f, 0xc1, 0x57, 0xf0, 0x2d, 0x7c,
	0x11, 0x4b, 0x64, 0xc7, 0x49, 0x47, 0xa2, 0xea, 0xce, 0x39, 0xe7, 0x5c, 0xdf, 0xc7, 0xb9, 0x0e,
	0x1c, 0x14, 0x52, 0x16, 0x25, 0xc6, 0x05, 0x0a, 0x59, 0xf1, 0x5c, 0xc7, 0xcb, 0xc3, 0x58, 0x21,
	0x65, 0x85, 0x92, 0x4d, 0x1d, 0xd5, 0x4a, 0x1a, 0x49, 0x48, 0xab, 0x89, 0x3a, 0x4d, 0xb4, 0x3c,
	0x9c, 0x3c, 0xf6, 0x71, 0xb4, 0xe6, 0x31, 0x15, 0x42, 0x1a, 0x6a, 0xb8, 0x14, 0xba, 0x8d, 0xe8,
	0x59, 0xf7, 0x75, 0xd9, 0x2c, 0x62, 0x6d, 0x54, 0x93, 0x9b, 0x96, 0x3d, 0xf8, 0xb3, 0x09, 0xc3,
	0x14, 0x29, 0xfb, 0x68, 0x73, 0x90, 0x3d, 0x18, 0x70, 0x16, 0x06, 0xd3, 0x60, 0x36, 0x4c, 0x07,
	0x9c, 0x91, 0x27, 0x00, 0x8c, 0x1a, 0xaa, 0xd1, 0x64, 0x9c, 0x85, 0x03, 0x87, 0x0f, 0x3d, 0x92,
	0x30, 0x42, 0x60, 0x5d, 0xd0, 0x0a, 0xc3, 0x35, 0x47, 0xb8, 0x33, 0x99, 0xc2, 0x88, 0xa1, 0xce,
	0x15, 0xaf, 0x6d, 0x11, 0xe1, 0xba, 0xa3, 0x56, 0x21, 0xf2, 0x08, 0x86, 0x9a, 0x56, 0x75, 0x89,
	0xf6, 0xce, 0x0d, 0xc7, 0x6f, 0xb7, 0x40, 0xc2, 0xc8, 0x27, 0x00, 0xfc, 0x59, 0xa3, 0xe2, 0x15,
	0x0a, 0x13, 0x6e, 0x4e, 0x83, 0xd9, 0x68, 0x3e, 0x8b, 0xfe, 0x6f, 0x3a, 0xea, 0x8b, 0x8e, 0x4e,
	0x7b, 0x7d, 0xba, 0x12, 0x4b, 0xe6, 0xf0, 0xa0, 0x56, 0xc8, 0x78, 0x6e, 0x90, 0x65, 0x5c, 0x68,
	0x54, 0x26, 0xd3, 0xfc, 0x1a, 0xc3, 0xad, 0x69, 0x30, 0xdb, 0x48, 0xef, 0xf7, 0x64, 0xe2, 0xb8,
	0x0b, 0x7e, 0x8d, 0xe4, 0x3d, 0x6c, 0xd7, 0x4a, 0x16, 0x8a, 0x56, 0x3a, 0x84, 0xe9, 0xda, 0x6c,
	0x34, 0x7f, 0x7e, 0x77, 0xee, 0xf3, 0x56, 0x9d, 0xf6, 0x61, 0x64, 0x06, 0x63, 0x85, 0x0b, 0x54,
	0x28, 0x72, 0xcc, 0xfc, 0xe0, 0x46, 0xae, 0xc9, 0xbd, 0x1e, 0xbf, 0x70, 0xd3, 0x3b, 0x86, 0x75,
	0x2e, 0x16, 0x32, 0xdc, 0x71, 0x89, 0x5e, 0xde, 0x9d, 0x28, 0x11, 0x0b, 0x79, 0x2a, 0x8c, 0xba,
	0x4a, 0x5d, 0xd0, 0xe4, 0x77, 0x00, 0x70, 0xd3, 0xb8, 0x35, 0xaa, 0xe4, 0x97, 0x8a, 0xaa, 0xab,
	0xac, 0x37, 0x70, 0xe8, 0x91, 0x84, 0x91, 0x67, 0xb0, 0x5b, 0x97, 0xd4, 0x2c, 0xa4, 0xaa, 0xb2,
	0x46, 0x70, 0xe3, 0xad, 0xdc, 0xe9, 0xc0, 0x2f, 0x82, 0x1b, 0xf2, 0x1a, 0xee, 0x69, 0xfc, 0xd1,
	0xa0, 0xc8, 0xb9, 0x28, 0xb2, 0x1c, 0x85, 0x41, 0xe5, 0xad, 0x1d, 0xdf, 0x10, 0x1f, 0x1c, 0x4e,
	0x5e, 0xc1, 0x98, 0x0b, 0xbb, 0x49, 0x36, 0x7d, 0x56, 0x49, 0x86, 0xa5, 0xf7, 0x7a, 0xff, 0x06,
	0xff, 0x6c, 0xe1, 0xc9, 0xaf, 0x00, 0xb6, 0xfc, 0x9c, 0xc8, 0x53, 0xd8, 0xc9, 0x65, 0x55, 0x51,
	0xc1, 0xb2, 0x92, 0x0b, 0xf4, 0x95, 0x8e, 0x3c, 0x76, 0xc6, 0x05, 0xfa, 0x1d, 0x1c, 0xf4, 0x3b,
	0x78, 0xdb, 0x92, 0xbd, 0x80, 0xfd, 0x5a, 0xe1, 0x32, 0xf3, 0x53, 0xb7, 0x3d, 0xb7, 0xc9, 0x77,
	0x2d, 0xec, 0x93, 0x25, 0x8c, 0x84, 0xb0, 0xb5, 0x44, 0xa5, 0xed, 0x22, 0xb6, 0x8b, 0xd6, 0x7d,
	0x4e, 0x2e, 0x60, 0xd8, 0x8f, 0x94, 0x8c, 0x61, 0xed, 0x3b, 0x5e, 0xf9, 0x62, 0xec, 0x91, 0xbc,
	0x81, 0x8d, 0x25, 0x2d, 0x1b, 0x74, 0x75, 0x8c, 0xe6, 0x93, 0xce, 0x9c, 0xee, 0x11, 0x45, 0x67,
	0x5c, 0x9b, 0xaf, 0x56, 0x91, 0xb6, 0xc2, 0xa3, 0xc1, 0xbb, 0xe0, 0x84, 0xc3, 0xc3, 0x5c, 0x56,
	0xb7, 0x18, 0x79, 0xb2, 0xd7, 0x3b, 0x79, 0x6e, 0x6f, 0x38, 0x0f, 0xbe, 0x1d, 0x75, 0x2a, 0x59,
	0x52, 0x51, 0x44, 0x52, 0x15, 0xf6, 0xdd, 0xbb, 0xfb, 0xe3, 0x96, 0xa2, 0x35, 0xd7, 0xab, 0xff,
	0x82, 0xe3, 0xee, 0xfc, 0x37, 0x08, 0x2e, 0x37, 0x9d, 0xf2, 0xed, 0xbf, 0x00, 0x00, 0x00, 0xff,
	0xff, 0x37, 0xed, 0xaa, 0xaa, 0x34, 0x04, 0x00, 0x00,
}
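The `prevProgramId` comment above only hints at how program ordering works in practice, so here is a minimal, self-contained sketch. It is not part of the generated file; the tool names, IDs, and command lines are invented, and it only assumes the vendored import path shown in this diff. It chains two ReadGroup_Program values by pointing the second entry's PrevProgramId at the first entry's Id.

// Hypothetical usage sketch; not part of the generated code.
package main

import (
	"fmt"

	genomics "google.golang.org/genproto/googleapis/genomics/v1"
)

func main() {
	// The alignment step is recorded first, with its own locally unique ID.
	align := &genomics.ReadGroup_Program{
		Id:          "align-1",
		Name:        "bwa",
		CommandLine: "bwa mem ref.fa sample.fq",
		Version:     "0.7.17",
	}
	// The dedup step points back at the alignment step via PrevProgramId,
	// which is what defines the ordering between the two programs.
	dedup := &genomics.ReadGroup_Program{
		Id:            "dedup-1",
		Name:          "picard",
		CommandLine:   "picard MarkDuplicates I=aln.bam O=dedup.bam",
		PrevProgramId: align.GetId(),
		Version:       "2.18.0",
	}
	fmt.Println(dedup.GetPrevProgramId()) // prints "align-1"
}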
166
vendor/google.golang.org/genproto/googleapis/genomics/v1/readgroupset.pb.go
generated
vendored
Normal file
@@ -0,0 +1,166 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: google/genomics/v1/readgroupset.proto

package genomics

import (
	fmt "fmt"
	proto "github.com/golang/protobuf/proto"
	_struct "github.com/golang/protobuf/ptypes/struct"
	_ "google.golang.org/genproto/googleapis/api/annotations"
	math "math"
)

// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package

// A read group set is a logical collection of read groups, which are
// collections of reads produced by a sequencer. A read group set typically
// models reads corresponding to one sample, sequenced one way, and aligned one
// way.
//
// * A read group set belongs to one dataset.
// * A read group belongs to one read group set.
// * A read belongs to one read group.
//
// For more genomics resource definitions, see [Fundamentals of Google
// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics)
type ReadGroupSet struct {
	// The server-generated read group set ID, unique for all read group sets.
	Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
	// The dataset to which this read group set belongs.
	DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"`
	// The reference set to which the reads in this read group set are aligned.
	ReferenceSetId string `protobuf:"bytes,3,opt,name=reference_set_id,json=referenceSetId,proto3" json:"reference_set_id,omitempty"`
	// The read group set name. By default this will be initialized to the sample
	// name of the sequenced data contained in this set.
	Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"`
	// The filename of the original source file for this read group set, if any.
	Filename string `protobuf:"bytes,5,opt,name=filename,proto3" json:"filename,omitempty"`
	// The read groups in this set. There are typically 1-10 read groups in a read
	// group set.
	ReadGroups []*ReadGroup `protobuf:"bytes,6,rep,name=read_groups,json=readGroups,proto3" json:"read_groups,omitempty"`
	// A map of additional read group set information.
	Info                 map[string]*_struct.ListValue `protobuf:"bytes,7,rep,name=info,proto3" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	XXX_NoUnkeyedLiteral struct{}                      `json:"-"`
	XXX_unrecognized     []byte                        `json:"-"`
	XXX_sizecache        int32                         `json:"-"`
}

func (m *ReadGroupSet) Reset()         { *m = ReadGroupSet{} }
func (m *ReadGroupSet) String() string { return proto.CompactTextString(m) }
func (*ReadGroupSet) ProtoMessage()    {}
func (*ReadGroupSet) Descriptor() ([]byte, []int) {
	return fileDescriptor_976a96d3fd28f245, []int{0}
}

func (m *ReadGroupSet) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ReadGroupSet.Unmarshal(m, b)
}
func (m *ReadGroupSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ReadGroupSet.Marshal(b, m, deterministic)
}
func (m *ReadGroupSet) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ReadGroupSet.Merge(m, src)
}
func (m *ReadGroupSet) XXX_Size() int {
	return xxx_messageInfo_ReadGroupSet.Size(m)
}
func (m *ReadGroupSet) XXX_DiscardUnknown() {
	xxx_messageInfo_ReadGroupSet.DiscardUnknown(m)
}

var xxx_messageInfo_ReadGroupSet proto.InternalMessageInfo

func (m *ReadGroupSet) GetId() string {
	if m != nil {
		return m.Id
	}
	return ""
}

func (m *ReadGroupSet) GetDatasetId() string {
	if m != nil {
		return m.DatasetId
	}
	return ""
}

func (m *ReadGroupSet) GetReferenceSetId() string {
	if m != nil {
		return m.ReferenceSetId
	}
	return ""
}

func (m *ReadGroupSet) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}

func (m *ReadGroupSet) GetFilename() string {
	if m != nil {
		return m.Filename
	}
	return ""
}

func (m *ReadGroupSet) GetReadGroups() []*ReadGroup {
	if m != nil {
		return m.ReadGroups
	}
	return nil
}

func (m *ReadGroupSet) GetInfo() map[string]*_struct.ListValue {
	if m != nil {
		return m.Info
	}
	return nil
}

func init() {
	proto.RegisterType((*ReadGroupSet)(nil), "google.genomics.v1.ReadGroupSet")
	proto.RegisterMapType((map[string]*_struct.ListValue)(nil), "google.genomics.v1.ReadGroupSet.InfoEntry")
}

func init() {
	proto.RegisterFile("google/genomics/v1/readgroupset.proto", fileDescriptor_976a96d3fd28f245)
}

var fileDescriptor_976a96d3fd28f245 = []byte{
	// 367 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x4f, 0x8b, 0xdb, 0x30,
	0x10, 0xc5, 0xb1, 0xf3, 0xa7, 0xcd, 0xa4, 0x84, 0x54, 0x87, 0x62, 0x4c, 0x03, 0x21, 0x50, 0x08,
	0x3d, 0xc8, 0x4d, 0x7a, 0x29, 0x29, 0xe4, 0x10, 0x28, 0x25, 0xb0, 0x87, 0x60, 0xc3, 0x1e, 0xf6,
	0x12, 0x14, 0x7b, 0x6c, 0xc4, 0x3a, 0x92, 0x91, 0xe4, 0x40, 0xbe, 0xf3, 0x7e, 0x80, 0x3d, 0x2e,
	0x96, 0xff, 0x10, 0xd8, 0x25, 0xb7, 0xd1, 0xd3, 0xef, 0x8d, 0x46, 0x6f, 0xe0, 0x47, 0x26, 0x65,
	0x96, 0x63, 0x90, 0xa1, 0x90, 0x67, 0x1e, 0xeb, 0xe0, 0xb2, 0x0a, 0x14, 0xb2, 0x24, 0x53, 0xb2,
	0x2c, 0x34, 0x1a, 0x5a, 0x28, 0x69, 0x24, 0x21, 0x35, 0x46, 0x5b, 0x8c, 0x5e, 0x56, 0xfe, 0xf7,
	0xc6, 0xca, 0x0a, 0x1e, 0x30, 0x21, 0xa4, 0x61, 0x86, 0x4b, 0xa1, 0x6b, 0x87, 0xbf, 0xb8, 0xd7,
	0xb8, 0x61, 0xda, 0x0e, 0xf6, 0x74, 0x2a, 0xd3, 0x40, 0x1b, 0x55, 0xc6, 0xcd, 0x9b, 0x8b, 0x17,
	0x17, 0xbe, 0x84, 0xc8, 0x92, 0xff, 0x95, 0x23, 0x42, 0x43, 0x26, 0xe0, 0xf2, 0xc4, 0x73, 0xe6,
	0xce, 0x72, 0x14, 0xba, 0x3c, 0x21, 0x33, 0x80, 0x84, 0x19, 0xa6, 0xd1, 0x1c, 0x79, 0xe2, 0xb9,
	0x56, 0x1f, 0x35, 0xca, 0x3e, 0x21, 0x4b, 0x98, 0x2a, 0x4c, 0x51, 0xa1, 0x88, 0xf1, 0xd8, 0x40,
	0x3d, 0x0b, 0x4d, 0x3a, 0x3d, 0xb2, 0x24, 0x81, 0xbe, 0x60, 0x67, 0xf4, 0xfa, 0xf6, 0xd6, 0xd6,
	0xc4, 0x87, 0xcf, 0x29, 0xcf, 0xd1, 0xea, 0x03, 0xab, 0x77, 0x67, 0xb2, 0x85, 0x71, 0xf5, 0x95,
	0x63, 0x1d, 0x92, 0x37, 0x9c, 0xf7, 0x96, 0xe3, 0xf5, 0x8c, 0xbe, 0xcf, 0x88, 0x76, 0xf3, 0x87,
	0xa0, 0xda, 0x52, 0x93, 0x2d, 0xf4, 0xb9, 0x48, 0xa5, 0xf7, 0xc9, 0x1a, 0x7f, 0xde, 0x35, 0x46,
	0x68, 0xe8, 0x5e, 0xa4, 0xf2, 0x9f, 0x30, 0xea, 0x1a, 0x5a, 0x9f, 0x1f, 0xc1, 0xa8, 0x93, 0xc8,
	0x14, 0x7a, 0xcf, 0x78, 0x6d, 0x62, 0xa9, 0x4a, 0xf2, 0x0b, 0x06, 0x17, 0x96, 0x97, 0x68, 0x23,
	0x19, 0xaf, 0xfd, 0xb6, 0x7f, 0x1b, 0x33, 0x7d, 0xe0, 0xda, 0x3c, 0x56, 0x44, 0x58, 0x83, 0x1b,
	0xf7, 0x8f, 0xb3, 0xcb, 0xe1, 0x5b, 0x2c, 0xcf, 0x1f, 0xcc, 0xb2, 0xfb, 0x7a, 0x3b, 0xcc, 0xa1,
	0x6a, 0x72, 0x70, 0x9e, 0x36, 0x2d, 0x28, 0x73, 0x26, 0x32, 0x2a, 0x55, 0x56, 0xad, 0xda, 0x3e,
	0x11, 0xd4, 0x57, 0xac, 0xe0, 0xfa, 0x76, 0xfd, 0x7f, 0xdb, 0xfa, 0xd5, 0x71, 0x4e, 0x43, 0x4b,
	0xfe, 0x7e, 0x0b, 0x00, 0x00, 0xff, 0xff, 0x1f, 0xa9, 0x2f, 0xa5, 0x80, 0x02, 0x00, 0x00,
}
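The ReadGroupSet doc comment above describes the dataset, read group set, read group, read hierarchy plus a free-form Info map. The sketch below is not part of the generated file: the IDs, sample name, and metadata are made up, and it only assumes the vendored import paths that appear in this diff. It shows how those pieces fit together in a caller's code.

// Hypothetical usage sketch; not part of the generated code.
package main

import (
	"fmt"

	structpb "github.com/golang/protobuf/ptypes/struct"
	genomics "google.golang.org/genproto/googleapis/genomics/v1"
)

func main() {
	// One read group set per sample: it nests the sample's read groups and
	// carries free-form metadata in the Info map of ListValue entries.
	rgs := &genomics.ReadGroupSet{
		DatasetId: "example-dataset", // made-up dataset ID
		Name:      "NA12878",         // made-up sample name
		ReadGroups: []*genomics.ReadGroup{
			{Name: "lane-1"},
		},
		Info: map[string]*structpb.ListValue{
			"library_prep": {
				Values: []*structpb.Value{
					{Kind: &structpb.Value_StringValue{StringValue: "PCR-free"}},
				},
			},
		},
	}
	fmt.Println(rgs.GetName(), len(rgs.GetReadGroups())) // "NA12878 1"
}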
1769
vendor/google.golang.org/genproto/googleapis/genomics/v1/reads.pb.go
generated
vendored
Normal file
File diff suppressed because it is too large
1104
vendor/google.golang.org/genproto/googleapis/genomics/v1/references.pb.go
generated
vendored
Normal file
File diff suppressed because it is too large
3524
vendor/google.golang.org/genproto/googleapis/genomics/v1/variants.pb.go
generated
vendored
Normal file
File diff suppressed because it is too large
2330
vendor/google.golang.org/genproto/googleapis/genomics/v1alpha2/pipelines.pb.go
generated
vendored
Normal file
File diff suppressed because it is too large