Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
33 commits
Select commit Hold shift + click to select a range
507bfbc
nogo: add more staticcheck analyzers
tamird Jan 8, 2026
d171aaa
Move and explain QF1001 suppression
tamird Feb 6, 2026
0fcc03b
Fix QF1002 violations and drop suppression
tamird Feb 6, 2026
9439810
Fix QF1003 violations and drop suppression
tamird Feb 6, 2026
9441825
Fix QF1004 violations and drop suppression
tamird Feb 7, 2026
141a317
Fix QF1005 violations and drop suppression
tamird Feb 7, 2026
163abb2
Fix QF1006 violations and drop suppression
tamird Feb 7, 2026
4fc09fa
Fix QF1007 violations and drop suppression
tamird Feb 7, 2026
2744619
Move and explain QF1008 suppression
tamird Feb 9, 2026
50329dc
Fix QF1012 violations and drop suppression
tamird Feb 7, 2026
0392729
Fix S1000 violations and drop suppression
tamird Feb 7, 2026
20699f5
Fix S1002 violations and drop suppression
tamird Feb 7, 2026
9c52ab3
Fix S1003 violations and drop suppression
tamird Feb 7, 2026
6252deb
Fix S1004 violations and drop suppression
tamird Feb 7, 2026
56a6075
Fix S1005 violations and drop suppression
tamird Feb 7, 2026
b5cadea
Fix S1007 violations and drop suppression
tamird Feb 7, 2026
ea0f2f4
Fix S1008 violations and drop suppression
tamird Feb 8, 2026
bf1fb63
Fix S1009 violations and drop suppression
tamird Feb 8, 2026
89cebe8
Fix S1011 violations and drop suppression
tamird Feb 8, 2026
7ba811f
Fix S1012 violations and drop suppression
tamird Feb 8, 2026
88ed0b2
Fix S1016 violations and drop suppression
tamird Feb 8, 2026
9a64f2f
Fix S1019 violations and drop suppression
tamird Feb 8, 2026
0ac4d1d
Fix S1020 violations and drop suppression
tamird Feb 8, 2026
7508cd5
Fix S1021 violations and drop suppression
tamird Feb 8, 2026
71a9600
Fix S1023 violations and drop suppression
tamird Feb 8, 2026
1f97fa1
Fix S1024 violations and drop suppression
tamird Feb 8, 2026
38e1236
Fix S1025 violations and drop suppression
tamird Feb 8, 2026
a42181a
Fix S1030 violations and drop suppression
tamird Feb 8, 2026
e8af5cc
Fix S1033 violations and drop suppression
tamird Feb 8, 2026
3f5d448
Fix S1034 violations and drop suppression
tamird Feb 8, 2026
61eddce
Fix S1039 violations and drop suppression
tamird Feb 8, 2026
a50651c
Fix S1040 violations and drop suppression
tamird Feb 8, 2026
2ba75b9
Merge pull request #12598 from tamird:nogo-more-checks
gvisor-bot Feb 20, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
75 changes: 75 additions & 0 deletions nogo.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -267,3 +267,78 @@ analyzers:
suppress:
- "comment on exported type Translation" # Intentional.
- "comment on exported type PinnedRange" # Intentional.

QF1001: # Apply De Morgan’s law
# Controversial.
internal:
exclude: [".*"]
QF1008: # Omit embedded fields from selector expression
# Controversial.
internal:
exclude: [".*"]
QF1002: # Convert untagged switch to tagged switch
# disabled for now due to existing violations.
internal:
exclude: [".*"]
QF1003: # Convert if/else-if chain to tagged switch
# disabled for now due to existing violations.
internal:
exclude: [".*"]
QF1004: # Use strings.ReplaceAll instead of Replace(..., -1)
# disabled for now due to existing violations.
internal:
exclude: [".*"]
QF1005: # Expand call to math.Pow
# disabled for now due to existing violations.
internal:
exclude: [".*"]
QF1011: # Omit redundant type from variable declaration
# disabled for now due to existing violations.
internal:
exclude: [".*"]
QF1012: # Use `fmt.Fprintf(x, ...)` instead of `x.Write(fmt.Sprintf(...))`
# disabled for now due to existing violations.
generated:
exclude: [".*"]
internal:
exclude: [".*"]
S1000: # Use plain channel ops instead of single-case select
# disabled for now due to existing violations.
internal:
exclude: [".*"]
S1001: # should use copy(to[:], from) instead of a loop
# disabled for now due to existing violations.
internal:
exclude: [".*"]
S1003: # Replace index+len check with strings.Contains
# disabled for now due to existing violations.
internal:
exclude: [".*"]
S1007: # Simplify regular expression by using raw string literal
# disabled for now due to existing violations.
internal:
exclude: [".*"]
S1008: # Simplify returning boolean expression
# disabled for now due to existing violations.
internal:
exclude: [".*"]
S1011: # Use append directly instead of looped appends
# disabled for now due to existing violations.
internal:
exclude: [".*"]
S1023: # Omit redundant control flow
# disabled for now due to existing violations.
internal:
exclude: [".*"]
S1025: # Drop redundant fmt.Sprintf for strings
# disabled for now due to existing violations.
internal:
exclude: [".*"]
S1038: # should use log.Printf(...) instead of log.Println(fmt.Sprintf(...))
# disabled for now due to existing violations.
internal:
exclude: [".*"]
S1039: # Unnecessary use of `fmt.Sprint`
# disabled for now due to existing violations.
internal:
exclude: [".*"]
6 changes: 1 addition & 5 deletions pkg/abi/linux/fuse.go
Original file line number Diff line number Diff line change
Expand Up @@ -977,11 +977,7 @@ func (r *FUSEDirents) MarshalBytes(buf []byte) []byte {

// UnmarshalBytes deserializes FUSEDirents from the src buffer.
func (r *FUSEDirents) UnmarshalBytes(src []byte) []byte {
for {
if len(src) <= (*FUSEDirentMeta)(nil).SizeBytes() {
break
}

for len(src) > (*FUSEDirentMeta)(nil).SizeBytes() {
// It's unclear how many dirents there are in src. Each dirent is dynamically
// sized and so we can't make assumptions about how many dirents we can allocate.
if r.Dirents == nil {
Expand Down
8 changes: 4 additions & 4 deletions pkg/aio/aio_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ func testRead(t *testing.T, newQueue func(cap int) (Queue, error)) {
done += chunkSize
}
}
if bytes.Compare(data, buf) != 0 {
if !bytes.Equal(data, buf) {
t.Errorf("bytes differ")
}
}
Expand Down Expand Up @@ -153,7 +153,7 @@ func testReadv(t *testing.T, newQueue func(cap int) (Queue, error)) {
done += 2 * chunkSize
}
}
if bytes.Compare(data, buf) != 0 {
if !bytes.Equal(data, buf) {
t.Errorf("bytes differ")
}
}
Expand Down Expand Up @@ -213,7 +213,7 @@ func testWrite(t *testing.T, newQueue func(cap int) (Queue, error)) {
if n, err := io.ReadFull(testFile, buf); err != nil {
t.Fatalf("failed to read temp file after %d bytes: %v", n, err)
}
if bytes.Compare(data, buf) != 0 {
if !bytes.Equal(data, buf) {
t.Errorf("bytes differ")
}
}
Expand Down Expand Up @@ -286,7 +286,7 @@ func testWritev(t *testing.T, newQueue func(cap int) (Queue, error)) {
if n, err := io.ReadFull(testFile, buf); err != nil {
t.Fatalf("failed to read temp file after %d bytes: %v", n, err)
}
if bytes.Compare(data, buf) != 0 {
if !bytes.Equal(data, buf) {
t.Errorf("bytes differ")
}
}
Expand Down
29 changes: 14 additions & 15 deletions pkg/bpf/decoder.go
Original file line number Diff line number Diff line change
Expand Up @@ -54,9 +54,9 @@ func decode(inst Instruction, line int, w *bytes.Buffer) error {
case Ldx:
err = decodeLdx(inst, w)
case St:
w.WriteString(fmt.Sprintf("M[%v] <- A", inst.K))
fmt.Fprintf(w, "M[%v] <- A", inst.K)
case Stx:
w.WriteString(fmt.Sprintf("M[%v] <- X", inst.K))
fmt.Fprintf(w, "M[%v] <- X", inst.K)
case Alu:
err = decodeAlu(inst, w)
case Jmp:
Expand All @@ -77,21 +77,21 @@ func decodeLd(inst Instruction, w *bytes.Buffer) error {

switch inst.OpCode & loadModeMask {
case Imm:
w.WriteString(fmt.Sprintf("%v", inst.K))
fmt.Fprintf(w, "%v", inst.K)
case Abs:
w.WriteString(fmt.Sprintf("P[%v:", inst.K))
fmt.Fprintf(w, "P[%v:", inst.K)
if err := decodeLdSize(inst, w); err != nil {
return err
}
w.WriteString("]")
case Ind:
w.WriteString(fmt.Sprintf("P[X+%v:", inst.K))
fmt.Fprintf(w, "P[X+%v:", inst.K)
if err := decodeLdSize(inst, w); err != nil {
return err
}
w.WriteString("]")
case Mem:
w.WriteString(fmt.Sprintf("M[%v]", inst.K))
fmt.Fprintf(w, "M[%v]", inst.K)
case Len:
w.WriteString("len")
default:
Expand Down Expand Up @@ -120,13 +120,13 @@ func decodeLdx(inst Instruction, w *bytes.Buffer) error {

switch inst.OpCode & loadModeMask {
case Imm:
w.WriteString(fmt.Sprintf("%v", inst.K))
fmt.Fprintf(w, "%v", inst.K)
case Mem:
w.WriteString(fmt.Sprintf("M[%v]", inst.K))
fmt.Fprintf(w, "M[%v]", inst.K)
case Len:
w.WriteString("len")
case Msh:
w.WriteString(fmt.Sprintf("4*(P[%v:1]&0xf)", inst.K))
fmt.Fprintf(w, "4*(P[%v:1]&0xf)", inst.K)
default:
return fmt.Errorf("invalid BPF LDX instruction: %v", linux.BPFInstruction(inst))
}
Expand Down Expand Up @@ -172,7 +172,7 @@ func decodeAlu(inst Instruction, w *bytes.Buffer) error {
func decodeSource(inst Instruction, w *bytes.Buffer) error {
switch inst.OpCode & srcAluJmpMask {
case K:
w.WriteString(fmt.Sprintf("%v", inst.K))
fmt.Fprintf(w, "%v", inst.K)
case X:
w.WriteString("X")
default:
Expand Down Expand Up @@ -205,10 +205,9 @@ func decodeJmp(inst Instruction, line int, w *bytes.Buffer) error {
if err := decodeSource(inst, w); err != nil {
return err
}
w.WriteString(
fmt.Sprintf(") ? %s : %s",
printJmpTarget(uint32(inst.JumpIfTrue), line),
printJmpTarget(uint32(inst.JumpIfFalse), line)))
fmt.Fprintf(w, ") ? %s : %s",
printJmpTarget(uint32(inst.JumpIfTrue), line),
printJmpTarget(uint32(inst.JumpIfFalse), line))
}
return nil
}
Expand All @@ -227,7 +226,7 @@ func decodeRet(inst Instruction, w *bytes.Buffer) error {
code := inst.OpCode & srcRetMask
switch code {
case K:
w.WriteString(fmt.Sprintf("%v", inst.K))
fmt.Fprintf(w, "%v", inst.K)
case A:
w.WriteString("A")
default:
Expand Down
2 changes: 1 addition & 1 deletion pkg/context/context.go
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ func (nt *NoTask) Interrupt() {

// Interrupted implements Blocker.Interrupted.
func (nt *NoTask) Interrupted() bool {
return nt.cancel != nil && len(nt.cancel) > 0
return len(nt.cancel) > 0
}

// Killed implements Blocker.Killed.
Expand Down
5 changes: 3 additions & 2 deletions pkg/cpuid/cpuid.go
Original file line number Diff line number Diff line change
Expand Up @@ -233,9 +233,10 @@ func readHWCap(auxvFilepath string) (hwCap, error) {
for i := 0; i < l; i++ {
tag := binary.LittleEndian.Uint64(auxv[i*16:])
val := binary.LittleEndian.Uint64(auxv[i*16+8:])
if tag == _AT_HWCAP {
switch tag {
case _AT_HWCAP:
c.hwCap1 = val
} else if tag == _AT_HWCAP2 {
case _AT_HWCAP2:
c.hwCap2 = val
}

Expand Down
5 changes: 1 addition & 4 deletions pkg/fspath/fspath.go
Original file line number Diff line number Diff line change
Expand Up @@ -161,10 +161,7 @@ func (it Iterator) Next() Iterator {
// aren't at the end of the path, there is definitely another path
// component.
i := it.end + 1
for {
if it.partialPathname[i] != pathSep {
break
}
for it.partialPathname[i] == pathSep {
i++
}
nextPartialPathname := it.partialPathname[i:]
Expand Down
4 changes: 2 additions & 2 deletions pkg/lisafs/testsuite/testsuite.go
Original file line number Diff line number Diff line change
Expand Up @@ -251,7 +251,7 @@ func readFDAndCmp(ctx context.Context, t *testing.T, fdLisa lisafs.ClientFD, off
t.Errorf("partial read: buf size = %d, read = %d", len(want), n)
return
}
if bytes.Compare(buf, want) != 0 {
if !bytes.Equal(buf, want) {
t.Errorf("bytes read differ from what was expected: want = %v, got = %v", want, buf)
}
}
Expand Down Expand Up @@ -365,7 +365,7 @@ func testRegularFileIO(ctx context.Context, t *testing.T, tester Tester, root li
t.Errorf("host read failed: %v", err)
} else if n != len(hostReadData) {
t.Errorf("partial read: buf size = %d, read = %d", len(hostReadData), n)
} else if bytes.Compare(hostReadData, data) != 0 {
} else if !bytes.Equal(hostReadData, data) {
t.Errorf("bytes read differ from what was expected: want = %v, got = %v", data, hostReadData)
}

Expand Down
2 changes: 1 addition & 1 deletion pkg/log/log.go
Original file line number Diff line number Diff line change
Expand Up @@ -327,7 +327,7 @@ func Stacks(all bool) []byte {
}

// stackRegexp matches one level within a stack trace.
var stackRegexp = regexp.MustCompile("(?m)^\\S+\\(.*\\)$\\r?\\n^\\t\\S+:\\d+.*$\\r?\\n")
var stackRegexp = regexp.MustCompile(`(?m)^\S+\(.*\)$\r?\n^\t\S+:\d+.*$\r?\n`)

// LocalStack returns the local goroutine stack, excluding the top N entries.
// LocalStack's own entry is excluded by default and does not need to be counted in excludeTopN.
Expand Down
2 changes: 1 addition & 1 deletion pkg/metric/metric.go
Original file line number Diff line number Diff line change
Expand Up @@ -514,7 +514,7 @@ func nameToPrometheusName(name string) string {
return strings.ReplaceAll(strings.TrimPrefix(name, "/"), "/", "_")
}

var validMetricNameRegexp = re.MustCompile("^(?:/[_\\w]+)+$")
var validMetricNameRegexp = re.MustCompile(`^(?:/[_\w]+)+$`)

// verifyName verifies that the given metric name is a valid path-style metric
// name.
Expand Down
4 changes: 1 addition & 3 deletions pkg/metric/metric_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -1149,9 +1149,7 @@ func TestMetricProfiling(t *testing.T) {
for i, m := range test.metricNames {
var fields []Field
if i == 0 {
for _, f := range test.firstMetricFields {
fields = append(fields, f)
}
fields = append(fields, test.firstMetricFields...)
}
newMetric, err := NewUint64Metric(m, Uint64Metadata{
Cumulative: true,
Expand Down
4 changes: 2 additions & 2 deletions pkg/safemem/seq_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ func TestBlockSeqIterBlocks(t *testing.T) {
srcs := test.BlockSeq()
// "Note that a non-nil empty slice and a nil slice ... are not
// deeply equal." - reflect
slices := make([][]byte, 0, 0)
slices := make([][]byte, 0)
for !srcs.IsEmpty() {
src := srcs.Head()
slices = append(slices, src.ToSlice())
Expand Down Expand Up @@ -193,7 +193,7 @@ func TestBlockSeqIterBytes(t *testing.T) {
}
srcs = nextSrcs
}
if got := string(dst.Bytes()); got != test.want {
if got := dst.String(); got != test.want {
t.Errorf("Copied string: got %q, wanted %q", got, test.want)
}
})
Expand Down
16 changes: 8 additions & 8 deletions pkg/segment/set.go
Original file line number Diff line number Diff line change
Expand Up @@ -447,7 +447,7 @@ func (s *Set) InsertWithoutMergingUnchecked(gap GapIterator, r Range, val Value)
if splitMaxGap {
gap.node.updateMaxGapLeaf()
}
return Iterator{gap.node, gap.index}
return Iterator(gap)
}

// InsertRange inserts the given segment into the set. If the new segment can
Expand Down Expand Up @@ -586,7 +586,7 @@ func (s *Set) Remove(seg Iterator) GapIterator {
if trackGaps != 0 {
seg.node.updateMaxGapLeaf()
}
return seg.node.rebalanceAfterRemove(GapIterator{seg.node, seg.index})
return seg.node.rebalanceAfterRemove(GapIterator(seg))
}

// RemoveAll removes all segments from the set. All existing iterators are
Expand Down Expand Up @@ -1735,7 +1735,7 @@ func (seg Iterator) PrevGap() GapIterator {
// must be in a leaf node.
return seg.node.children[seg.index].lastSegment().NextGap()
}
return GapIterator{seg.node, seg.index}
return GapIterator(seg)
}

// NextGap returns the gap immediately after the iterated segment.
Expand Down Expand Up @@ -2036,26 +2036,26 @@ func (n *node) String() string {
func (n *node) writeDebugString(buf *bytes.Buffer, prefix string) {
if n.hasChildren != (n.nrSegments > 0 && n.children[0] != nil) {
buf.WriteString(prefix)
buf.WriteString(fmt.Sprintf("WARNING: inconsistent value of hasChildren: got %v, want %v\n", n.hasChildren, !n.hasChildren))
fmt.Fprintf(buf, "WARNING: inconsistent value of hasChildren: got %v, want %v\n", n.hasChildren, !n.hasChildren)
}
for i := 0; i < n.nrSegments; i++ {
if child := n.children[i]; child != nil {
cprefix := fmt.Sprintf("%s- % 3d ", prefix, i)
if child.parent != n || child.parentIndex != i {
buf.WriteString(cprefix)
buf.WriteString(fmt.Sprintf("WARNING: inconsistent linkage to parent: got (%p, %d), want (%p, %d)\n", child.parent, child.parentIndex, n, i))
fmt.Fprintf(buf, "WARNING: inconsistent linkage to parent: got (%p, %d), want (%p, %d)\n", child.parent, child.parentIndex, n, i)
}
child.writeDebugString(buf, fmt.Sprintf("%s- % 3d ", prefix, i))
}
buf.WriteString(prefix)
if n.hasChildren {
if trackGaps != 0 {
buf.WriteString(fmt.Sprintf("- % 3d: %v => %v, maxGap: %d\n", i, n.keys[i], n.values[i], n.maxGap.Get()))
fmt.Fprintf(buf, "- % 3d: %v => %v, maxGap: %d\n", i, n.keys[i], n.values[i], n.maxGap.Get())
} else {
buf.WriteString(fmt.Sprintf("- % 3d: %v => %v\n", i, n.keys[i], n.values[i]))
fmt.Fprintf(buf, "- % 3d: %v => %v\n", i, n.keys[i], n.values[i])
}
} else {
buf.WriteString(fmt.Sprintf("- % 3d: %v => %v\n", i, n.keys[i], n.values[i]))
fmt.Fprintf(buf, "- % 3d: %v => %v\n", i, n.keys[i], n.values[i])
}
}
if child := n.children[n.nrSegments]; child != nil {
Expand Down
4 changes: 1 addition & 3 deletions pkg/sentry/devices/nvproxy/nvproxy_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -271,9 +271,7 @@ func TestFilterCapabilities(t *testing.T) {
}
capSets = append(capSets, individualCap)
}
for _, capSet := range nvconf.PopularCapabilitySets() {
capSets = append(capSets, capSet)
}
capSets = append(capSets, nvconf.PopularCapabilitySets()...)

// Build all the ABIs.
Init()
Expand Down
2 changes: 1 addition & 1 deletion pkg/sentry/devices/tpuproxy/tpuproxy.go
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ func RegisterTPUv4Device(ctx context.Context, creds *auth.Credentials, root vfs.
return err
}

deviceIDStr := strings.Replace(buf.String(), "0x", "", -1)
deviceIDStr := strings.ReplaceAll(buf.String(), "0x", "")
deviceID, err := strconv.ParseInt(strings.TrimSpace(deviceIDStr), 16, 64)
if err != nil {
return fmt.Errorf("parsing PCI device ID: %w", err)
Expand Down
2 changes: 1 addition & 1 deletion pkg/sentry/fdcollector/fdcollector.go
Original file line number Diff line number Diff line change
Expand Up @@ -109,5 +109,5 @@ func (c *Agent) String() string {
// c.buf.Bytes(), which may be modified after c.mu.Unlock(). If you change
// this function to return []byte for some reason, c.buf.Bytes() needs to
// be cloned instead.
return string(c.buf.Bytes())
return c.buf.String()
}
Loading
Loading