[VOL-5374] Upgrade Go version to 1.23.1 and a few other package versions

Signed-off-by: akashreddyk <akash.kankanala@radisys.com>
Change-Id: I50531e8febdc00b335ebbe5a4b1099fc3bf6d5b4
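
Note on the vendored protobuf change below: encode.go now supports lazy fields/extensions and switches the int32 size cache to store "size + 1", so that 0 means "no cached value" while a genuine zero-byte size remains cacheable. The sketch below is not the library's API; it only illustrates that size-cache convention with hypothetical helpers, assuming a plain *int32 cache field.

    package main

    import (
    	"fmt"
    	"math"
    	"sync/atomic"
    )

    // storeCachedSize caches size using the size+1 convention; sizes that would
    // overflow int32 are recorded as 0 ("invalid, recompute on encode").
    func storeCachedSize(cache *int32, size int) {
    	if size > math.MaxInt32-1 {
    		atomic.StoreInt32(cache, 0)
    		return
    	}
    	atomic.StoreInt32(cache, int32(size+1))
    }

    // loadCachedSize returns the cached size and whether the cache was valid.
    func loadCachedSize(cache *int32) (int, bool) {
    	if v := atomic.LoadInt32(cache); v > 0 {
    		return int(v - 1), true
    	}
    	return 0, false
    }

    func main() {
    	var cache int32
    	storeCachedSize(&cache, 0) // a real 0-byte message is still cacheable
    	if n, ok := loadCachedSize(&cache); ok {
    		fmt.Println("cached size:", n)
    	}
    }
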
diff --git a/vendor/google.golang.org/protobuf/internal/impl/encode.go b/vendor/google.golang.org/protobuf/internal/impl/encode.go
index 845c67d..b2e2122 100644
--- a/vendor/google.golang.org/protobuf/internal/impl/encode.go
+++ b/vendor/google.golang.org/protobuf/internal/impl/encode.go
@@ -10,7 +10,8 @@
 	"sync/atomic"
 
 	"google.golang.org/protobuf/internal/flags"
-	proto "google.golang.org/protobuf/proto"
+	"google.golang.org/protobuf/internal/protolazy"
+	"google.golang.org/protobuf/proto"
 	piface "google.golang.org/protobuf/runtime/protoiface"
 )
 
@@ -49,8 +50,11 @@
 		return 0
 	}
 	if opts.UseCachedSize() && mi.sizecacheOffset.IsValid() {
-		if size := atomic.LoadInt32(p.Apply(mi.sizecacheOffset).Int32()); size >= 0 {
-			return int(size)
+		// The size cache contains the size + 1, to allow the
+		// zero value to be invalid, while also allowing for a
+		// 0 size to be cached.
+		if size := atomic.LoadInt32(p.Apply(mi.sizecacheOffset).Int32()); size > 0 {
+			return int(size - 1)
 		}
 	}
 	return mi.sizePointerSlow(p, opts)
@@ -60,7 +64,7 @@
 	if flags.ProtoLegacy && mi.isMessageSet {
 		size = sizeMessageSet(mi, p, opts)
 		if mi.sizecacheOffset.IsValid() {
-			atomic.StoreInt32(p.Apply(mi.sizecacheOffset).Int32(), int32(size))
+			atomic.StoreInt32(p.Apply(mi.sizecacheOffset).Int32(), int32(size+1))
 		}
 		return size
 	}
@@ -68,11 +72,39 @@
 		e := p.Apply(mi.extensionOffset).Extensions()
 		size += mi.sizeExtensions(e, opts)
 	}
+
+	var lazy **protolazy.XXX_lazyUnmarshalInfo
+	var presence presence
+	if mi.presenceOffset.IsValid() {
+		presence = p.Apply(mi.presenceOffset).PresenceInfo()
+		if mi.lazyOffset.IsValid() {
+			lazy = p.Apply(mi.lazyOffset).LazyInfoPtr()
+		}
+	}
+
 	for _, f := range mi.orderedCoderFields {
 		if f.funcs.size == nil {
 			continue
 		}
 		fptr := p.Apply(f.offset)
+
+		if f.presenceIndex != noPresence {
+			if !presence.Present(f.presenceIndex) {
+				continue
+			}
+
+			if f.isLazy && fptr.AtomicGetPointer().IsNil() {
+				if lazyFields(opts) {
+					size += (*lazy).SizeField(uint32(f.num))
+					continue
+				} else {
+					mi.lazyUnmarshal(p, f.num)
+				}
+			}
+			size += f.funcs.size(fptr, f, opts)
+			continue
+		}
+
 		if f.isPointer && fptr.Elem().IsNil() {
 			continue
 		}
@@ -84,13 +116,16 @@
 		}
 	}
 	if mi.sizecacheOffset.IsValid() {
-		if size > math.MaxInt32 {
+		if size > (math.MaxInt32 - 1) {
 			// The size is too large for the int32 sizecache field.
 			// We will need to recompute the size when encoding;
 			// unfortunately expensive, but better than invalid output.
-			atomic.StoreInt32(p.Apply(mi.sizecacheOffset).Int32(), -1)
+			atomic.StoreInt32(p.Apply(mi.sizecacheOffset).Int32(), 0)
 		} else {
-			atomic.StoreInt32(p.Apply(mi.sizecacheOffset).Int32(), int32(size))
+			// The size cache contains the size + 1, to allow the
+			// zero value to be invalid, while also allowing for a
+			// 0 size to be cached.
+			atomic.StoreInt32(p.Apply(mi.sizecacheOffset).Int32(), int32(size+1))
 		}
 	}
 	return size
@@ -128,11 +163,52 @@
 			return b, err
 		}
 	}
+
+	var lazy **protolazy.XXX_lazyUnmarshalInfo
+	var presence presence
+	if mi.presenceOffset.IsValid() {
+		presence = p.Apply(mi.presenceOffset).PresenceInfo()
+		if mi.lazyOffset.IsValid() {
+			lazy = p.Apply(mi.lazyOffset).LazyInfoPtr()
+		}
+	}
+
 	for _, f := range mi.orderedCoderFields {
 		if f.funcs.marshal == nil {
 			continue
 		}
 		fptr := p.Apply(f.offset)
+
+		if f.presenceIndex != noPresence {
+			if !presence.Present(f.presenceIndex) {
+				continue
+			}
+			if f.isLazy {
+				// Be careful, this field needs to be read atomically, like for a get
+				if f.isPointer && fptr.AtomicGetPointer().IsNil() {
+					if lazyFields(opts) {
+						b, _ = (*lazy).AppendField(b, uint32(f.num))
+						continue
+					} else {
+						mi.lazyUnmarshal(p, f.num)
+					}
+				}
+
+				b, err = f.funcs.marshal(b, fptr, f, opts)
+				if err != nil {
+					return b, err
+				}
+				continue
+			} else if f.isPointer && fptr.Elem().IsNil() {
+				continue
+			}
+			b, err = f.funcs.marshal(b, fptr, f, opts)
+			if err != nil {
+				return b, err
+			}
+			continue
+		}
+
 		if f.isPointer && fptr.Elem().IsNil() {
 			continue
 		}
@@ -149,6 +225,22 @@
 	return b, nil
 }
 
+// fullyLazyExtensions returns true if we should attempt to keep extensions lazy over size and marshal.
+func fullyLazyExtensions(opts marshalOptions) bool {
+	// When deterministic marshaling is requested, force an unmarshal for lazy
+	// extensions to produce a deterministic result, instead of passing through
+	// bytes lazily that may or may not match what Go Protobuf would produce.
+	return opts.flags&piface.MarshalDeterministic == 0
+}
+
+// lazyFields returns true if we should attempt to keep fields lazy over size and marshal.
+func lazyFields(opts marshalOptions) bool {
+	// When deterministic marshaling is requested, force an unmarshal for lazy
+	// fields to produce a deterministic result, instead of passing through
+	// bytes lazily that may or may not match what Go Protobuf would produce.
+	return opts.flags&piface.MarshalDeterministic == 0
+}
+
 func (mi *MessageInfo) sizeExtensions(ext *map[int32]ExtensionField, opts marshalOptions) (n int) {
 	if ext == nil {
 		return 0
@@ -158,6 +250,14 @@
 		if xi.funcs.size == nil {
 			continue
 		}
+		if fullyLazyExtensions(opts) {
+			// Don't expand the extension, instead use the buffer to calculate size
+			if lb := x.lazyBuffer(); lb != nil {
+				// We got hold of the buffer, so it's still lazy.
+				n += len(lb)
+				continue
+			}
+		}
 		n += xi.funcs.size(x.Value(), xi.tagsize, opts)
 	}
 	return n
@@ -176,6 +276,13 @@
 		var err error
 		for _, x := range *ext {
 			xi := getExtensionFieldInfo(x.Type())
+			if fullyLazyExtensions(opts) {
+				// Don't expand the extension if it's still in wire format, instead use the buffer content.
+				if lb := x.lazyBuffer(); lb != nil {
+					b = append(b, lb...)
+					continue
+				}
+			}
 			b, err = xi.funcs.marshal(b, x.Value(), xi.wiretag, opts)
 		}
 		return b, err
@@ -191,6 +298,13 @@
 		for _, k := range keys {
 			x := (*ext)[int32(k)]
 			xi := getExtensionFieldInfo(x.Type())
+			if fullyLazyExtensions(opts) {
+				// Don't expand the extension if it's still in wire format, instead use the buffer content.
+				if lb := x.lazyBuffer(); lb != nil {
+					b = append(b, lb...)
+					continue
+				}
+			}
 			b, err = xi.funcs.marshal(b, x.Value(), xi.wiretag, opts)
 			if err != nil {
 				return b, err