Mirror of https://github.com/adnanh/webhook.git, synced 2025-06-26 14:28:31 +00:00
Update gopkg.in/yaml.v2 indirect dependency to v2.4.0
This commit is contained in:
parent 853030346b
commit 16c49415ab
17 changed files with 733 additions and 336 deletions
vendor/gopkg.in/yaml.v2/decode.go (generated, vendored): 292 lines changed
@@ -4,6 +4,7 @@ import (
 	"encoding"
 	"encoding/base64"
 	"fmt"
+	"io"
 	"math"
 	"reflect"
 	"strconv"
@@ -22,19 +23,22 @@ type node struct {
 	kind         int
 	line, column int
 	tag          string
-	value        string
-	implicit     bool
-	children     []*node
-	anchors      map[string]*node
+	// For an alias node, alias holds the resolved alias.
+	alias    *node
+	value    string
+	implicit bool
+	children []*node
+	anchors  map[string]*node
 }
 
 // ----------------------------------------------------------------------------
 // Parser, produces a node tree out of a libyaml event stream.
 
 type parser struct {
-	parser yaml_parser_t
-	event  yaml_event_t
-	doc    *node
+	parser   yaml_parser_t
+	event    yaml_event_t
+	doc      *node
+	doneInit bool
 }
 
 func newParser(b []byte) *parser {
@@ -42,21 +46,30 @@ func newParser(b []byte) *parser {
 	if !yaml_parser_initialize(&p.parser) {
 		panic("failed to initialize YAML emitter")
 	}
-
 	if len(b) == 0 {
 		b = []byte{'\n'}
 	}
-
 	yaml_parser_set_input_string(&p.parser, b)
-
-	p.skip()
-	if p.event.typ != yaml_STREAM_START_EVENT {
-		panic("expected stream start event, got " + strconv.Itoa(int(p.event.typ)))
-	}
-	p.skip()
 	return &p
 }
 
+func newParserFromReader(r io.Reader) *parser {
+	p := parser{}
+	if !yaml_parser_initialize(&p.parser) {
+		panic("failed to initialize YAML emitter")
+	}
+	yaml_parser_set_input_reader(&p.parser, r)
+	return &p
+}
+
+func (p *parser) init() {
+	if p.doneInit {
+		return
+	}
+	p.expect(yaml_STREAM_START_EVENT)
+	p.doneInit = true
+}
+
 func (p *parser) destroy() {
 	if p.event.typ != yaml_NO_EVENT {
 		yaml_event_delete(&p.event)
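The newParserFromReader and init/doneInit additions are the plumbing behind yaml.NewDecoder, which reads documents from an io.Reader instead of a byte slice. A minimal sketch of that API from the consumer side; the stream contents and field names below are invented for illustration and are not taken from webhook or this commit:

package main

import (
	"fmt"
	"io"
	"strings"

	"gopkg.in/yaml.v2"
)

func main() {
	// Two YAML documents in one stream; Decode consumes one document per call.
	r := strings.NewReader("id: 1\n---\nid: 2\n")
	dec := yaml.NewDecoder(r)
	for {
		var doc struct {
			ID int `yaml:"id"`
		}
		if err := dec.Decode(&doc); err != nil {
			if err == io.EOF {
				break
			}
			panic(err)
		}
		fmt.Println(doc.ID) // prints 1, then 2
	}
}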
@@ -64,16 +77,35 @@ func (p *parser) destroy() {
 	yaml_parser_delete(&p.parser)
 }
 
-func (p *parser) skip() {
-	if p.event.typ != yaml_NO_EVENT {
-		if p.event.typ == yaml_STREAM_END_EVENT {
-			failf("attempted to go past the end of stream; corrupted value?")
+// expect consumes an event from the event stream and
+// checks that it's of the expected type.
+func (p *parser) expect(e yaml_event_type_t) {
+	if p.event.typ == yaml_NO_EVENT {
+		if !yaml_parser_parse(&p.parser, &p.event) {
+			p.fail()
 		}
-		yaml_event_delete(&p.event)
 	}
+	if p.event.typ == yaml_STREAM_END_EVENT {
+		failf("attempted to go past the end of stream; corrupted value?")
+	}
+	if p.event.typ != e {
+		p.parser.problem = fmt.Sprintf("expected %s event but got %s", e, p.event.typ)
+		p.fail()
+	}
+	yaml_event_delete(&p.event)
+	p.event.typ = yaml_NO_EVENT
+}
+
+// peek peeks at the next event in the event stream,
+// puts the results into p.event and returns the event type.
+func (p *parser) peek() yaml_event_type_t {
+	if p.event.typ != yaml_NO_EVENT {
+		return p.event.typ
+	}
 	if !yaml_parser_parse(&p.parser, &p.event) {
 		p.fail()
 	}
+	return p.event.typ
 }
 
 func (p *parser) fail() {
@@ -81,6 +113,10 @@ func (p *parser) fail() {
 	var line int
 	if p.parser.problem_mark.line != 0 {
 		line = p.parser.problem_mark.line
+		// Scanner errors don't iterate line before returning error
+		if p.parser.error == yaml_SCANNER_ERROR {
+			line++
+		}
 	} else if p.parser.context_mark.line != 0 {
 		line = p.parser.context_mark.line
 	}
@@ -103,7 +139,8 @@ func (p *parser) anchor(n *node, anchor []byte) {
 }
 
 func (p *parser) parse() *node {
-	switch p.event.typ {
+	p.init()
+	switch p.peek() {
 	case yaml_SCALAR_EVENT:
 		return p.scalar()
 	case yaml_ALIAS_EVENT:
@@ -118,7 +155,7 @@ func (p *parser) parse() *node {
 		// Happens when attempting to decode an empty buffer.
 		return nil
 	default:
-		panic("attempted to parse unknown event: " + strconv.Itoa(int(p.event.typ)))
+		panic("attempted to parse unknown event: " + p.event.typ.String())
 	}
 }
 
@@ -134,19 +171,20 @@ func (p *parser) document() *node {
 	n := p.node(documentNode)
 	n.anchors = make(map[string]*node)
 	p.doc = n
-	p.skip()
+	p.expect(yaml_DOCUMENT_START_EVENT)
 	n.children = append(n.children, p.parse())
-	if p.event.typ != yaml_DOCUMENT_END_EVENT {
-		panic("expected end of document event but got " + strconv.Itoa(int(p.event.typ)))
-	}
-	p.skip()
+	p.expect(yaml_DOCUMENT_END_EVENT)
 	return n
 }
 
 func (p *parser) alias() *node {
 	n := p.node(aliasNode)
 	n.value = string(p.event.anchor)
-	p.skip()
+	n.alias = p.doc.anchors[n.value]
+	if n.alias == nil {
+		failf("unknown anchor '%s' referenced", n.value)
+	}
+	p.expect(yaml_ALIAS_EVENT)
 	return n
 }
 
@@ -156,29 +194,29 @@ func (p *parser) scalar() *node {
 	n.tag = string(p.event.tag)
 	n.implicit = p.event.implicit
 	p.anchor(n, p.event.anchor)
-	p.skip()
+	p.expect(yaml_SCALAR_EVENT)
 	return n
 }
 
 func (p *parser) sequence() *node {
 	n := p.node(sequenceNode)
 	p.anchor(n, p.event.anchor)
-	p.skip()
-	for p.event.typ != yaml_SEQUENCE_END_EVENT {
+	p.expect(yaml_SEQUENCE_START_EVENT)
+	for p.peek() != yaml_SEQUENCE_END_EVENT {
 		n.children = append(n.children, p.parse())
 	}
-	p.skip()
+	p.expect(yaml_SEQUENCE_END_EVENT)
 	return n
 }
 
 func (p *parser) mapping() *node {
 	n := p.node(mappingNode)
 	p.anchor(n, p.event.anchor)
-	p.skip()
-	for p.event.typ != yaml_MAPPING_END_EVENT {
+	p.expect(yaml_MAPPING_START_EVENT)
+	for p.peek() != yaml_MAPPING_END_EVENT {
 		n.children = append(n.children, p.parse(), p.parse())
 	}
-	p.skip()
+	p.expect(yaml_MAPPING_END_EVENT)
 	return n
 }
 
@@ -187,10 +225,14 @@ func (p *parser) mapping() *node {
 
 type decoder struct {
 	doc     *node
-	aliases map[string]bool
+	aliases map[*node]bool
 	mapType reflect.Type
 	terrors []string
 	strict  bool
+
+	decodeCount int
+	aliasCount  int
+	aliasDepth  int
 }
 
 var (
@@ -198,11 +240,13 @@ var (
 	durationType   = reflect.TypeOf(time.Duration(0))
 	defaultMapType = reflect.TypeOf(map[interface{}]interface{}{})
 	ifaceType      = defaultMapType.Elem()
+	timeType       = reflect.TypeOf(time.Time{})
+	ptrTimeType    = reflect.TypeOf(&time.Time{})
 )
 
 func newDecoder(strict bool) *decoder {
 	d := &decoder{mapType: defaultMapType, strict: strict}
-	d.aliases = make(map[string]bool)
+	d.aliases = make(map[*node]bool)
 	return d
 }
 
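The new timeType and ptrTimeType variables back the resolver's timestamp support: with this version of the package, a YAML timestamp scalar can be decoded directly into a time.Time field. A rough sketch of that behavior; the struct, field names, and document below are made up for illustration:

package main

import (
	"fmt"
	"time"

	"gopkg.in/yaml.v2"
)

type event struct {
	Name string    `yaml:"name"`
	At   time.Time `yaml:"at"`
}

func main() {
	var e event
	// An ISO-8601 style scalar resolves as a timestamp and fills the time.Time field.
	if err := yaml.Unmarshal([]byte("name: deploy\nat: 2015-01-01T12:00:00Z\n"), &e); err != nil {
		panic(err)
	}
	fmt.Println(e.At.UTC().Format(time.RFC3339)) // 2015-01-01T12:00:00Z
}

Note the comment added further down in this diff: when the target is a plain interface{}, timestamp-like scalars still decode as strings for backward compatibility.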
@@ -251,7 +295,7 @@ func (d *decoder) callUnmarshaler(n *node, u Unmarshaler) (good bool) {
 //
 // If n holds a null value, prepare returns before doing anything.
 func (d *decoder) prepare(n *node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) {
-	if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "" && n.implicit) {
+	if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "~" || n.value == "" && n.implicit) {
 		return out, false, false
 	}
 	again := true
@@ -274,7 +318,43 @@ func (d *decoder) prepare(n *node, out reflect.Value) (newout reflect.Value, unm
 	return out, false, false
 }
 
+const (
+	// 400,000 decode operations is ~500kb of dense object declarations, or
+	// ~5kb of dense object declarations with 10000% alias expansion
+	alias_ratio_range_low = 400000
+
+	// 4,000,000 decode operations is ~5MB of dense object declarations, or
+	// ~4.5MB of dense object declarations with 10% alias expansion
+	alias_ratio_range_high = 4000000
+
+	// alias_ratio_range is the range over which we scale allowed alias ratios
+	alias_ratio_range = float64(alias_ratio_range_high - alias_ratio_range_low)
+)
+
+func allowedAliasRatio(decodeCount int) float64 {
+	switch {
+	case decodeCount <= alias_ratio_range_low:
+		// allow 99% to come from alias expansion for small-to-medium documents
+		return 0.99
+	case decodeCount >= alias_ratio_range_high:
+		// allow 10% to come from alias expansion for very large documents
+		return 0.10
+	default:
+		// scale smoothly from 99% down to 10% over the range.
+		// this maps to 396,000 - 400,000 allowed alias-driven decodes over the range.
+		// 400,000 decode operations is ~100MB of allocations in worst-case scenarios (single-item maps).
+		return 0.99 - 0.89*(float64(decodeCount-alias_ratio_range_low)/alias_ratio_range)
+	}
+}
+
 func (d *decoder) unmarshal(n *node, out reflect.Value) (good bool) {
+	d.decodeCount++
+	if d.aliasDepth > 0 {
+		d.aliasCount++
+	}
+	if d.aliasCount > 100 && d.decodeCount > 1000 && float64(d.aliasCount)/float64(d.decodeCount) > allowedAliasRatio(d.decodeCount) {
+		failf("document contains excessive aliasing")
+	}
+	switch n.kind {
 	case documentNode:
 		return d.document(n, out)
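The decodeCount/aliasCount/aliasDepth counters and allowedAliasRatio are the guard against "billion laughs" style alias expansion, where a few kilobytes of aliases expand into an enormous in-memory structure. A hedged sketch of the kind of input the new check is meant to refuse; the document is invented and the printed error text follows the failf call in the hunk above:

package main

import (
	"fmt"
	"strings"

	"gopkg.in/yaml.v2"
)

func main() {
	// Each level aliases the previous one ten times, so the expanded document
	// grows exponentially while the input itself stays tiny.
	doc := strings.Join([]string{
		"a: &a [x, x, x, x, x, x, x, x, x, x]",
		"b: &b [*a, *a, *a, *a, *a, *a, *a, *a, *a, *a]",
		"c: &c [*b, *b, *b, *b, *b, *b, *b, *b, *b, *b]",
		"d: &d [*c, *c, *c, *c, *c, *c, *c, *c, *c, *c]",
		"e: &e [*d, *d, *d, *d, *d, *d, *d, *d, *d, *d]",
		"f: &f [*e, *e, *e, *e, *e, *e, *e, *e, *e, *e]",
		"g: &g [*f, *f, *f, *f, *f, *f, *f, *f, *f, *f]",
	}, "\n")

	var out interface{}
	err := yaml.Unmarshal([]byte(doc), &out)
	fmt.Println(err) // expected: yaml: document contains excessive aliasing
}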
@@ -308,16 +388,15 @@ func (d *decoder) document(n *node, out reflect.Value) (good bool) {
 }
 
 func (d *decoder) alias(n *node, out reflect.Value) (good bool) {
-	an, ok := d.doc.anchors[n.value]
-	if !ok {
-		failf("unknown anchor '%s' referenced", n.value)
-	}
-	if d.aliases[n.value] {
+	if d.aliases[n] {
+		// TODO this could actually be allowed in some circumstances.
 		failf("anchor '%s' value contains itself", n.value)
 	}
-	d.aliases[n.value] = true
-	good = d.unmarshal(an, out)
-	delete(d.aliases, n.value)
+	d.aliases[n] = true
+	d.aliasDepth++
+	good = d.unmarshal(n.alias, out)
+	d.aliasDepth--
+	delete(d.aliases, n)
 	return good
 }
 
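With aliases now keyed by node pointer rather than by anchor name, the self-reference check still rejects an anchored value that contains its own alias. A small sketch; the anchor name and document are arbitrary:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	// The mapping anchored as &a refers back to itself through *a.
	var out interface{}
	err := yaml.Unmarshal([]byte("a: &a\n  b: *a\n"), &out)
	fmt.Println(err) // expected: yaml: anchor 'a' value contains itself
}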
@@ -329,7 +408,7 @@ func resetMap(out reflect.Value) {
 	}
 }
 
-func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
+func (d *decoder) scalar(n *node, out reflect.Value) bool {
 	var tag string
 	var resolved interface{}
 	if n.tag == "" && !n.implicit {
@@ -353,9 +432,26 @@ func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
 		}
 		return true
 	}
-	if s, ok := resolved.(string); ok && out.CanAddr() {
-		if u, ok := out.Addr().Interface().(encoding.TextUnmarshaler); ok {
-			err := u.UnmarshalText([]byte(s))
+	if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() {
+		// We've resolved to exactly the type we want, so use that.
+		out.Set(resolvedv)
+		return true
+	}
+	// Perhaps we can use the value as a TextUnmarshaler to
+	// set its value.
+	if out.CanAddr() {
+		u, ok := out.Addr().Interface().(encoding.TextUnmarshaler)
+		if ok {
+			var text []byte
+			if tag == yaml_BINARY_TAG {
+				text = []byte(resolved.(string))
+			} else {
+				// We let any value be unmarshaled into TextUnmarshaler.
+				// That might be more lax than we'd like, but the
+				// TextUnmarshaler itself should bowl out any dubious values.
+				text = []byte(n.value)
+			}
+			err := u.UnmarshalText(text)
 			if err != nil {
 				fail(err)
 			}
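The reworked scalar path above hands the raw scalar to any addressable target that implements encoding.TextUnmarshaler. A sketch using net.IP from the standard library, which implements UnmarshalText; the struct and document are invented:

package main

import (
	"fmt"
	"net"

	"gopkg.in/yaml.v2"
)

type host struct {
	Name string `yaml:"name"`
	Addr net.IP `yaml:"addr"`
}

func main() {
	var h host
	if err := yaml.Unmarshal([]byte("name: gw\naddr: 10.0.0.1\n"), &h); err != nil {
		panic(err)
	}
	// The scalar "10.0.0.1" was parsed by net.IP's UnmarshalText, not stored as a raw string.
	fmt.Println(h.Addr)
}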
@@ -366,46 +462,54 @@ func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
 	case reflect.String:
 		if tag == yaml_BINARY_TAG {
 			out.SetString(resolved.(string))
-			good = true
-		} else if resolved != nil {
+			return true
+		}
+		if resolved != nil {
 			out.SetString(n.value)
-			good = true
+			return true
 		}
 	case reflect.Interface:
 		if resolved == nil {
 			out.Set(reflect.Zero(out.Type()))
+		} else if tag == yaml_TIMESTAMP_TAG {
+			// It looks like a timestamp but for backward compatibility
+			// reasons we set it as a string, so that code that unmarshals
+			// timestamp-like values into interface{} will continue to
+			// see a string and not a time.Time.
+			// TODO(v3) Drop this.
+			out.Set(reflect.ValueOf(n.value))
 		} else {
 			out.Set(reflect.ValueOf(resolved))
 		}
-		good = true
+		return true
 	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
 		switch resolved := resolved.(type) {
 		case int:
 			if !out.OverflowInt(int64(resolved)) {
 				out.SetInt(int64(resolved))
-				good = true
+				return true
 			}
 		case int64:
 			if !out.OverflowInt(resolved) {
 				out.SetInt(resolved)
-				good = true
+				return true
 			}
 		case uint64:
 			if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
 				out.SetInt(int64(resolved))
-				good = true
+				return true
 			}
 		case float64:
 			if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
 				out.SetInt(int64(resolved))
-				good = true
+				return true
 			}
 		case string:
 			if out.Type() == durationType {
 				d, err := time.ParseDuration(resolved)
 				if err == nil {
 					out.SetInt(int64(d))
-					good = true
+					return true
 				}
 			}
 		}
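The string arm of the integer case keeps the existing convenience of parsing Go duration syntax into time.Duration fields; only the good flag has turned into an early return. A short sketch with an invented config struct:

package main

import (
	"fmt"
	"time"

	"gopkg.in/yaml.v2"
)

type cfg struct {
	Timeout time.Duration `yaml:"timeout"`
}

func main() {
	var c cfg
	if err := yaml.Unmarshal([]byte("timeout: 2m30s\n"), &c); err != nil {
		panic(err)
	}
	fmt.Println(c.Timeout) // 2m30s
}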
@@ -414,44 +518,49 @@ func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
 		case int:
 			if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
 				out.SetUint(uint64(resolved))
-				good = true
+				return true
 			}
 		case int64:
 			if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
 				out.SetUint(uint64(resolved))
-				good = true
+				return true
 			}
 		case uint64:
 			if !out.OverflowUint(uint64(resolved)) {
 				out.SetUint(uint64(resolved))
-				good = true
+				return true
 			}
 		case float64:
 			if resolved <= math.MaxUint64 && !out.OverflowUint(uint64(resolved)) {
 				out.SetUint(uint64(resolved))
-				good = true
+				return true
 			}
 		}
 	case reflect.Bool:
 		switch resolved := resolved.(type) {
 		case bool:
 			out.SetBool(resolved)
-			good = true
+			return true
 		}
 	case reflect.Float32, reflect.Float64:
 		switch resolved := resolved.(type) {
 		case int:
 			out.SetFloat(float64(resolved))
-			good = true
+			return true
 		case int64:
 			out.SetFloat(float64(resolved))
-			good = true
+			return true
 		case uint64:
 			out.SetFloat(float64(resolved))
-			good = true
+			return true
 		case float64:
 			out.SetFloat(resolved)
-			good = true
+			return true
 		}
+	case reflect.Struct:
+		if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() {
+			out.Set(resolvedv)
+			return true
+		}
 	case reflect.Ptr:
 		if out.Type().Elem() == reflect.TypeOf(resolved) {
@@ -459,13 +568,11 @@ func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
 			elem := reflect.New(out.Type().Elem())
 			elem.Elem().Set(reflect.ValueOf(resolved))
 			out.Set(elem)
-			good = true
+			return true
 		}
 	}
-	if !good {
-		d.terror(n, tag, out)
-	}
-	return good
+	d.terror(n, tag, out)
+	return false
 }
 
 func settableValueOf(i interface{}) reflect.Value {
@@ -482,6 +589,10 @@ func (d *decoder) sequence(n *node, out reflect.Value) (good bool) {
 	switch out.Kind() {
 	case reflect.Slice:
 		out.Set(reflect.MakeSlice(out.Type(), l, l))
+	case reflect.Array:
+		if l != out.Len() {
+			failf("invalid array: want %d elements but got %d", out.Len(), l)
+		}
 	case reflect.Interface:
 		// No type hints. Will have to use a generic sequence.
 		iface = out
@@ -500,7 +611,9 @@ func (d *decoder) sequence(n *node, out reflect.Value) (good bool) {
 			j++
 		}
 	}
-	out.Set(out.Slice(0, j))
+	if out.Kind() != reflect.Array {
+		out.Set(out.Slice(0, j))
+	}
 	if iface.IsValid() {
 		iface.Set(out)
 	}
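The reflect.Array case plus the guard around out.Slice let sequences decode into fixed-size Go arrays, with a length check instead of silent truncation. A hedged sketch; the error text is expected to follow the failf format string in the hunk above:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	var rgb [3]int
	if err := yaml.Unmarshal([]byte("[255, 128, 0]"), &rgb); err != nil {
		panic(err)
	}
	fmt.Println(rgb) // [255 128 0]

	var short [2]int
	err := yaml.Unmarshal([]byte("[1, 2, 3]"), &short)
	fmt.Println(err) // expected: yaml: invalid array: want 2 elements but got 3
}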
@@ -561,7 +674,7 @@ func (d *decoder) mapping(n *node, out reflect.Value) (good bool) {
 			}
 			e := reflect.New(et).Elem()
 			if d.unmarshal(n.children[i+1], e) {
-				out.SetMapIndex(k, e)
+				d.setMapIndex(n.children[i+1], out, k, e)
 			}
 		}
 	}
@@ -569,6 +682,14 @@
 	return true
 }
 
+func (d *decoder) setMapIndex(n *node, out, k, v reflect.Value) {
+	if d.strict && out.MapIndex(k) != zeroValue {
+		d.terrors = append(d.terrors, fmt.Sprintf("line %d: key %#v already set in map", n.line+1, k.Interface()))
+		return
+	}
+	out.SetMapIndex(k, v)
+}
+
 func (d *decoder) mappingSlice(n *node, out reflect.Value) (good bool) {
 	outt := out.Type()
 	if outt.Elem() != mapItemType {
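setMapIndex centralizes map writes so that strict mode can report duplicate keys, while plain Unmarshal keeps its last-value-wins behavior. A sketch of both paths on an invented document:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	doc := []byte("port: 8080\nport: 9090\n")

	lax := map[string]int{}
	_ = yaml.Unmarshal(doc, &lax)
	fmt.Println(lax["port"]) // 9090: the later value wins

	strict := map[string]int{}
	err := yaml.UnmarshalStrict(doc, &strict)
	fmt.Println(err) // expected to mention: line 2: key "port" already set in map
}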
@@ -616,6 +737,10 @@ func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) {
 		elemType = inlineMap.Type().Elem()
 	}
 
+	var doneFields []bool
+	if d.strict {
+		doneFields = make([]bool, len(sinfo.FieldsList))
+	}
 	for i := 0; i < l; i += 2 {
 		ni := n.children[i]
 		if isMerge(ni) {
@@ -626,6 +751,13 @@
 			continue
 		}
 		if info, ok := sinfo.FieldsMap[name.String()]; ok {
+			if d.strict {
+				if doneFields[info.Id] {
+					d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s already set in type %s", ni.line+1, name.String(), out.Type()))
+					continue
+				}
+				doneFields[info.Id] = true
+			}
 			var field reflect.Value
 			if info.Inline == nil {
 				field = out.Field(info.Num)
@@ -639,9 +771,9 @@
 			}
 			value := reflect.New(elemType).Elem()
 			d.unmarshal(n.children[i+1], value)
-			inlineMap.SetMapIndex(name, value)
+			d.setMapIndex(n.children[i+1], inlineMap, name, value)
 		} else if d.strict {
-			d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in struct %s", n.line+1, name.String(), out.Type()))
+			d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in type %s", ni.line+1, name.String(), out.Type()))
 		}
 	}
 	return true
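For structs, the doneFields bookkeeping makes strict mode flag fields that are set twice, and the reworded message (now using the child node's line number) covers keys that don't exist on the target type. A sketch with an invented struct:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

type server struct {
	Host string `yaml:"host"`
	Port int    `yaml:"port"`
}

func main() {
	var s server
	doc := []byte("host: a\nhost: b\nproto: tcp\n")
	err := yaml.UnmarshalStrict(doc, &s)
	// Expected to report both problems, roughly:
	//   line 2: field host already set in type main.server
	//   line 3: field proto not found in type main.server
	fmt.Println(err)
}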
@@ -656,8 +788,7 @@ func (d *decoder) merge(n *node, out reflect.Value) {
 	case mappingNode:
 		d.unmarshal(n, out)
 	case aliasNode:
-		an, ok := d.doc.anchors[n.value]
-		if ok && an.kind != mappingNode {
+		if n.alias != nil && n.alias.kind != mappingNode {
 			failWantMap()
 		}
 		d.unmarshal(n, out)
@@ -666,8 +797,7 @@
 		for i := len(n.children) - 1; i >= 0; i-- {
 			ni := n.children[i]
 			if ni.kind == aliasNode {
-				an, ok := d.doc.anchors[ni.value]
-				if ok && an.kind != mappingNode {
+				if ni.alias != nil && ni.alias.kind != mappingNode {
 					failWantMap()
 				}
 			} else if ni.kind != mappingNode {