@@ -2,17 +2,10 @@ package goanalysis
 
 import (
 	"fmt"
-	"runtime"
-	"sort"
-	"strings"
-	"sync"
-	"sync/atomic"
-	"time"
 
 	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/packages"
 
-	"github.com/golangci/golangci-lint/internal/cache"
 	"github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors"
 	"github.com/golangci/golangci-lint/pkg/lint/linter"
 	"github.com/golangci/golangci-lint/pkg/logutils"
@@ -119,158 +112,3 @@ func buildIssues(diags []Diagnostic, linterNameBuilder func(diag *Diagnostic) st
 	}
 	return issues
 }
-
-func getIssuesCacheKey(analyzers []*analysis.Analyzer) string {
-	return "lint/result:" + analyzersHashID(analyzers)
-}
-
-func saveIssuesToCache(allPkgs []*packages.Package, pkgsFromCache map[*packages.Package]bool,
-	issues []result.Issue, lintCtx *linter.Context, analyzers []*analysis.Analyzer,
-) {
-	startedAt := time.Now()
-	perPkgIssues := map[*packages.Package][]result.Issue{}
-	for ind := range issues {
-		i := &issues[ind]
-		perPkgIssues[i.Pkg] = append(perPkgIssues[i.Pkg], *i)
-	}
-
-	var savedIssuesCount int64 = 0
-	lintResKey := getIssuesCacheKey(analyzers)
-
-	workerCount := runtime.GOMAXPROCS(-1)
-	var wg sync.WaitGroup
-	wg.Add(workerCount)
-
-	pkgCh := make(chan *packages.Package, len(allPkgs))
-	for i := 0; i < workerCount; i++ {
-		go func() {
-			defer wg.Done()
-			for pkg := range pkgCh {
-				pkgIssues := perPkgIssues[pkg]
-				encodedIssues := make([]EncodingIssue, 0, len(pkgIssues))
-				for ind := range pkgIssues {
-					i := &pkgIssues[ind]
-					encodedIssues = append(encodedIssues, EncodingIssue{
-						FromLinter:           i.FromLinter,
-						Text:                 i.Text,
-						Severity:             i.Severity,
-						Pos:                  i.Pos,
-						LineRange:            i.LineRange,
-						Replacement:          i.Replacement,
-						ExpectNoLint:         i.ExpectNoLint,
-						ExpectedNoLintLinter: i.ExpectedNoLintLinter,
-					})
-				}
-
-				atomic.AddInt64(&savedIssuesCount, int64(len(encodedIssues)))
-				if err := lintCtx.PkgCache.Put(pkg, cache.HashModeNeedAllDeps, lintResKey, encodedIssues); err != nil {
-					lintCtx.Log.Infof("Failed to save package %s issues (%d) to cache: %s", pkg, len(pkgIssues), err)
-				} else {
-					issuesCacheDebugf("Saved package %s issues (%d) to cache", pkg, len(pkgIssues))
-				}
-			}
-		}()
-	}
-
-	for _, pkg := range allPkgs {
-		if pkgsFromCache[pkg] {
-			continue
-		}
-
-		pkgCh <- pkg
-	}
-	close(pkgCh)
-	wg.Wait()
-
-	lintCtx.PkgCache.Close()
-
-	issuesCacheDebugf("Saved %d issues from %d packages to cache in %s", savedIssuesCount, len(allPkgs), time.Since(startedAt))
-}
-
-func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context,
-	analyzers []*analysis.Analyzer,
-) (issuesFromCache []result.Issue, pkgsFromCache map[*packages.Package]bool) {
-	startedAt := time.Now()
-
-	lintResKey := getIssuesCacheKey(analyzers)
-	type cacheRes struct {
-		issues  []result.Issue
-		loadErr error
-	}
-	pkgToCacheRes := make(map[*packages.Package]*cacheRes, len(pkgs))
-	for _, pkg := range pkgs {
-		pkgToCacheRes[pkg] = &cacheRes{}
-	}
-
-	workerCount := runtime.GOMAXPROCS(-1)
-	var wg sync.WaitGroup
-	wg.Add(workerCount)
-
-	pkgCh := make(chan *packages.Package, len(pkgs))
-	for range workerCount {
-		go func() {
-			defer wg.Done()
-			for pkg := range pkgCh {
-				var pkgIssues []EncodingIssue
-				err := lintCtx.PkgCache.Get(pkg, cache.HashModeNeedAllDeps, lintResKey, &pkgIssues)
-				cacheRes := pkgToCacheRes[pkg]
-				cacheRes.loadErr = err
-				if err != nil {
-					continue
-				}
-				if len(pkgIssues) == 0 {
-					continue
-				}
-
-				issues := make([]result.Issue, 0, len(pkgIssues))
-				for i := range pkgIssues {
-					issue := &pkgIssues[i]
-					issues = append(issues, result.Issue{
-						FromLinter:           issue.FromLinter,
-						Text:                 issue.Text,
-						Severity:             issue.Severity,
-						Pos:                  issue.Pos,
-						LineRange:            issue.LineRange,
-						Replacement:          issue.Replacement,
-						Pkg:                  pkg,
-						ExpectNoLint:         issue.ExpectNoLint,
-						ExpectedNoLintLinter: issue.ExpectedNoLintLinter,
-					})
-				}
-				cacheRes.issues = issues
-			}
-		}()
-	}
-
-	for _, pkg := range pkgs {
-		pkgCh <- pkg
-	}
-	close(pkgCh)
-	wg.Wait()
-
-	loadedIssuesCount := 0
-	pkgsFromCache = map[*packages.Package]bool{}
-	for pkg, cacheRes := range pkgToCacheRes {
-		if cacheRes.loadErr == nil {
-			loadedIssuesCount += len(cacheRes.issues)
-			pkgsFromCache[pkg] = true
-			issuesFromCache = append(issuesFromCache, cacheRes.issues...)
-			issuesCacheDebugf("Loaded package %s issues (%d) from cache", pkg, len(cacheRes.issues))
-		} else {
-			issuesCacheDebugf("Didn't load package %s issues from cache: %s", pkg, cacheRes.loadErr)
-		}
-	}
-	issuesCacheDebugf("Loaded %d issues from cache in %s, analyzing %d/%d packages",
-		loadedIssuesCount, time.Since(startedAt), len(pkgs)-len(pkgsFromCache), len(pkgs))
-	return issuesFromCache, pkgsFromCache
-}
-
-func analyzersHashID(analyzers []*analysis.Analyzer) string {
-	names := make([]string, 0, len(analyzers))
-	for _, a := range analyzers {
-		names = append(names, a.Name)
-	}
-
-	sort.Strings(names)
-	return strings.Join(names, ",")
-}