Diffstat (limited to 'src/cmd/compile/internal/escape/utils.go')
-rw-r--r-- | src/cmd/compile/internal/escape/utils.go | 215
1 file changed, 215 insertions, 0 deletions
diff --git a/src/cmd/compile/internal/escape/utils.go b/src/cmd/compile/internal/escape/utils.go
new file mode 100644
index 0000000..2c6e9bc
--- /dev/null
+++ b/src/cmd/compile/internal/escape/utils.go
@@ -0,0 +1,215 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package escape
+
+import (
+	"cmd/compile/internal/ir"
+	"cmd/compile/internal/typecheck"
+)
+
+func isSliceSelfAssign(dst, src ir.Node) bool {
+	// Detect the following special case.
+	//
+	//	func (b *Buffer) Foo() {
+	//		n, m := ...
+	//		b.buf = b.buf[n:m]
+	//	}
+	//
+	// This assignment is a no-op for escape analysis,
+	// it does not store any new pointers into b that were not already there.
+	// However, without this special case b will escape, because we assign to OIND/ODOTPTR.
+	// Here we assume that the statement will not contain calls,
+	// that is, that order will move any calls to init.
+	// Otherwise base ONAME value could change between the moments
+	// when we evaluate it for dst and for src.
+
+	// dst is ONAME dereference.
+	var dstX ir.Node
+	switch dst.Op() {
+	default:
+		return false
+	case ir.ODEREF:
+		dst := dst.(*ir.StarExpr)
+		dstX = dst.X
+	case ir.ODOTPTR:
+		dst := dst.(*ir.SelectorExpr)
+		dstX = dst.X
+	}
+	if dstX.Op() != ir.ONAME {
+		return false
+	}
+	// src is a slice operation.
+	switch src.Op() {
+	case ir.OSLICE, ir.OSLICE3, ir.OSLICESTR:
+		// OK.
+	case ir.OSLICEARR, ir.OSLICE3ARR:
+		// Since arrays are embedded into containing object,
+		// slice of non-pointer array will introduce a new pointer into b that was not already there
+		// (pointer to b itself). After such assignment, if b contents escape,
+		// b escapes as well. If we ignore such OSLICEARR, we will conclude
+		// that b does not escape when b contents do.
+		//
+		// Pointer to an array is OK since it's not stored inside b directly.
+		// For slicing an array (not pointer to array), there is an implicit OADDR.
+		// We check that to determine non-pointer array slicing.
+		src := src.(*ir.SliceExpr)
+		if src.X.Op() == ir.OADDR {
+			return false
+		}
+	default:
+		return false
+	}
+	// slice is applied to ONAME dereference.
+	var baseX ir.Node
+	switch base := src.(*ir.SliceExpr).X; base.Op() {
+	default:
+		return false
+	case ir.ODEREF:
+		base := base.(*ir.StarExpr)
+		baseX = base.X
+	case ir.ODOTPTR:
+		base := base.(*ir.SelectorExpr)
+		baseX = base.X
+	}
+	if baseX.Op() != ir.ONAME {
+		return false
+	}
+	// dst and src reference the same base ONAME.
+	return dstX.(*ir.Name) == baseX.(*ir.Name)
+}
+
+// isSelfAssign reports whether assignment from src to dst can
+// be ignored by the escape analysis as it's effectively a self-assignment.
+func isSelfAssign(dst, src ir.Node) bool {
+	if isSliceSelfAssign(dst, src) {
+		return true
+	}
+
+	// Detect trivial assignments that assign back to the same object.
+	//
+	// It covers these cases:
+	//	val.x = val.y
+	//	val.x[i] = val.y[j]
+	//	val.x1.x2 = val.x1.y2
+	//	... etc
+	//
+	// These assignments do not change assigned object lifetime.
+
+	if dst == nil || src == nil || dst.Op() != src.Op() {
+		return false
+	}
+
+	// The expression prefix must be both "safe" and identical.
+	switch dst.Op() {
+	case ir.ODOT, ir.ODOTPTR:
+		// Safe trailing accessors that are permitted to differ.
+		dst := dst.(*ir.SelectorExpr)
+		src := src.(*ir.SelectorExpr)
+		return ir.SameSafeExpr(dst.X, src.X)
+	case ir.OINDEX:
+		dst := dst.(*ir.IndexExpr)
+		src := src.(*ir.IndexExpr)
+		if mayAffectMemory(dst.Index) || mayAffectMemory(src.Index) {
+			return false
+		}
+		return ir.SameSafeExpr(dst.X, src.X)
+	default:
+		return false
+	}
+}
+
+// mayAffectMemory reports whether evaluation of n may affect the program's
+// memory state. If the expression can't affect memory state, then it can be
+// safely ignored by the escape analysis.
+func mayAffectMemory(n ir.Node) bool {
+	// We may want to use a list of "memory safe" ops instead of generally
+	// "side-effect free", which would include all calls and other ops that can
+	// allocate or change global state. For now, it's safer to start with the latter.
+	//
+	// We're ignoring things like division by zero, index out of range,
+	// and nil pointer dereference here.
+
+	// TODO(rsc): It seems like it should be possible to replace this with
+	// an ir.Any looking for any op that's not the ones in the case statement.
+	// But that produces changes in the compiled output detected by buildall.
+	switch n.Op() {
+	case ir.ONAME, ir.OLITERAL, ir.ONIL:
+		return false
+
+	case ir.OADD, ir.OSUB, ir.OOR, ir.OXOR, ir.OMUL, ir.OLSH, ir.ORSH, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OMOD:
+		n := n.(*ir.BinaryExpr)
+		return mayAffectMemory(n.X) || mayAffectMemory(n.Y)
+
+	case ir.OINDEX:
+		n := n.(*ir.IndexExpr)
+		return mayAffectMemory(n.X) || mayAffectMemory(n.Index)
+
+	case ir.OCONVNOP, ir.OCONV:
+		n := n.(*ir.ConvExpr)
+		return mayAffectMemory(n.X)
+
+	case ir.OLEN, ir.OCAP, ir.ONOT, ir.OBITNOT, ir.OPLUS, ir.ONEG, ir.OALIGNOF, ir.OOFFSETOF, ir.OSIZEOF:
+		n := n.(*ir.UnaryExpr)
+		return mayAffectMemory(n.X)
+
+	case ir.ODOT, ir.ODOTPTR:
+		n := n.(*ir.SelectorExpr)
+		return mayAffectMemory(n.X)
+
+	case ir.ODEREF:
+		n := n.(*ir.StarExpr)
+		return mayAffectMemory(n.X)
+
+	default:
+		return true
+	}
+}
+
+// HeapAllocReason returns the reason the given Node must be heap
+// allocated, or the empty string if it doesn't.
+func HeapAllocReason(n ir.Node) string {
+	if n == nil || n.Type() == nil {
+		return ""
+	}
+
+	// Parameters are always passed via the stack.
+	if n.Op() == ir.ONAME {
+		n := n.(*ir.Name)
+		if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT {
+			return ""
+		}
+	}
+
+	if n.Type().Size() > ir.MaxStackVarSize {
+		return "too large for stack"
+	}
+
+	if (n.Op() == ir.ONEW || n.Op() == ir.OPTRLIT) && n.Type().Elem().Size() > ir.MaxImplicitStackVarSize {
+		return "too large for stack"
+	}
+
+	if n.Op() == ir.OCLOSURE && typecheck.ClosureType(n.(*ir.ClosureExpr)).Size() > ir.MaxImplicitStackVarSize {
+		return "too large for stack"
+	}
+	if n.Op() == ir.OMETHVALUE && typecheck.MethodValueType(n.(*ir.SelectorExpr)).Size() > ir.MaxImplicitStackVarSize {
+		return "too large for stack"
+	}
+
+	if n.Op() == ir.OMAKESLICE {
+		n := n.(*ir.MakeExpr)
+		r := n.Cap
+		if r == nil {
+			r = n.Len
+		}
+		if !ir.IsSmallIntConst(r) {
+			return "non-constant size"
+		}
+		if t := n.Type(); t.Elem().Size() != 0 && ir.Int64Val(r) > ir.MaxImplicitStackVarSize/t.Elem().Size() {
+			return "too large for stack"
+		}
+	}
+
+	return ""
+}
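
For context, here is a minimal, illustrative sketch (not part of the patch) of the source-level patterns this file reasons about. The Buffer, val, and grow names are hypothetical, modeled on the examples in the comments above; the sketch only shows what kind of user code triggers each rule, it does not call the compiler internals.

package main

type Buffer struct {
	buf []byte
}

// Reslicing a field of the receiver through the pointer stores no new
// pointer into *b, so escape analysis can ignore the assignment
// (the isSliceSelfAssign case).
func (b *Buffer) Truncate(n, m int) {
	b.buf = b.buf[n:m]
}

type val struct {
	x, y []int
}

// Assigning one field of an object to another field of the same object
// does not change the object's lifetime (the isSelfAssign case).
func (v *val) copyField() {
	v.x = v.y
}

// A make whose size is not a small constant cannot be stack allocated;
// HeapAllocReason reports "non-constant size" for it.
func grow(n int) []byte {
	return make([]byte, n)
}

func main() {
	b := &Buffer{buf: make([]byte, 8)}
	b.Truncate(1, 4)
	(&val{}).copyField()
	_ = grow(16)
}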