github.com/verrazzano/verrazzano@v1.7.1/tools/vz/pkg/internal/util/report/report.go

     1  // Copyright (c) 2021, 2024, Oracle and/or its affiliates.
     2  // Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
     3  
     4  // Package report handles reporting
     5  package report
     6  
     7  import (
     8  	"errors"
     9  	"fmt"
    10  	"github.com/verrazzano/verrazzano/tools/vz/cmd/version"
    11  	"github.com/verrazzano/verrazzano/tools/vz/pkg/constants"
    12  	"github.com/verrazzano/verrazzano/tools/vz/pkg/helpers"
    13  	"go.uber.org/zap"
    14  	"io/fs"
    15  	"os"
    16  	"path/filepath"
    17  	"reflect"
    18  	"strings"
    19  	"sync"
    20  )
    21  
     22  // NOTE: This is part of the contract with the analyzers; however, it is currently an initial stake in the ground
     23  //		 and will evolve rapidly as we add analysis cases
    24  
     25  // TODO: We have rudimentary settings and a rudimentary dump of the report to start with here, i.e., bare bones
     26  //       to get the details out for now, and we will go from there. But some things are already on the radar here:
     27  //
     28  //      1) The format of the human-readable report will evolve (wrapping long lines, etc.)
     29  //      2) Other output formats suitable for automation to process (i.e., other automation that will look at the
     30  //         results and do something with them automatically), maybe CSV, JSON, etc.
     31  //      3) Detail consolidation/redaction suitable for sharing as a bug report
     32  //      4) Etc.
     33  //
    34  
     35  // For example, when we report issues we will want to report:
     36  //		1) Per source (cluster, build, etc.)
     37  //		2) Sorted in priority order (worst first) TODO
    38  
     39  // We have considered whether, per source, we should keep a map of Issues keyed by issue type, so there is one Issue
     40  // per type of issue and analyzers contribute supporting data to it (rather than creating a separate Issue for each
     41  // occurrence found in different places). We are hesitant to do that now because it reduces flexibility, and until we
     42  // really have the analysis drill-down patterns and more scenarios in place it seems premature (we could allow both,
     43  // but it is not clear we need that complexity yet either). One good example: when a bunch of pods are impacted by the
     44  // same root-cause issue, we really don't need to spam the report with the same issue repeated (we could add more
     45  // supporting data to one root issue instead of creating an issue for each occurrence), but the analyzer can manage
     46  // that and knows more about whether it is really a different issue or not.
    47  
     48  // We have a map per source. The source is a string here. Generally for clusters it is something that identifies
     49  // the cluster, but other analyzers may not be looking at a cluster, so they may use some other identification.
     50  // In the current implementation, the source is the root file path that the analyzer is looking at.
    51  var reports = make(map[string][]Issue)
    52  var allSourcesAnalyzed = make(map[string]string)
    53  var reportMutex = &sync.Mutex{}
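
         // As an illustration (not part of the original source), after analyzing a single cluster snapshot the maps
         // above might conceptually hold values like the following, where the key is the root path that was analyzed
         // (the issue values shown are hypothetical):
         //
         //	reports = map[string][]Issue{
         //		"/tmp/cluster-snapshot": {podIssue, ingressIssue},
         //	}
         //	allSourcesAnalyzed = map[string]string{
         //		"/tmp/cluster-snapshot": "/tmp/cluster-snapshot",
         //	}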
    54  
    55  // ContributeIssuesMap allows a map of issues to be contributed
    56  func ContributeIssuesMap(log *zap.SugaredLogger, source string, issues map[string]Issue) (err error) {
     57  	log.Debugf("ContributeIssuesMap called for source %s with %d issues", source, len(issues))
     58  	if len(source) == 0 {
     59  		return errors.New("ContributeIssuesMap requires a non-empty source to be specified")
    60  	}
    61  	for _, issue := range issues {
    62  		err = issue.Validate(log, source)
    63  		if err != nil {
    64  			return err
    65  		}
    66  	}
    67  	reportMutex.Lock()
    68  	reportIssues := reports[source]
    69  	if len(reportIssues) == 0 {
    70  		reportIssues = make([]Issue, 0, 10)
    71  	}
    72  	for _, issue := range issues {
    73  		issue.SupportingData = DeduplicateSupportingDataList(issue.SupportingData)
    74  		reportIssues = append(reportIssues, issue)
    75  	}
    76  	reports[source] = reportIssues
    77  	reportMutex.Unlock()
    78  	return nil
    79  }
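
         // Illustrative sketch (not part of the original source): an analyzer could contribute a map of issues, keyed
         // by issue type, for a single source. The issue type, path, and logger below are hypothetical; only fields
         // referenced elsewhere in this file are shown.
         //
         //	issues := map[string]report.Issue{
         //		"ImagePullBackOff": {
         //			Type:    "ImagePullBackOff",
         //			Source:  "/tmp/cluster-snapshot",
         //			Summary: "One or more pods could not pull their images",
         //		},
         //	}
         //	if err := report.ContributeIssuesMap(logger, "/tmp/cluster-snapshot", issues); err != nil {
         //		logger.Errorf("Failed to contribute issues: %v", err)
         //	}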
    80  
    81  // ContributeIssue allows a single issue to be contributed
    82  func ContributeIssue(log *zap.SugaredLogger, issue Issue) (err error) {
     83  	log.Debugf("ContributeIssue called with issue %v", issue)
    84  	err = issue.Validate(log, "")
    85  	if err != nil {
    86  		log.Debugf("Validate failed %s", err.Error())
    87  		return err
    88  	}
    89  	reportMutex.Lock()
    90  	reportIssues := reports[issue.Source]
    91  	if len(reportIssues) == 0 {
    92  		reportIssues = make([]Issue, 0, 10)
    93  	}
    94  	issue.SupportingData = DeduplicateSupportingDataList(issue.SupportingData)
    95  	reportIssues = append(reportIssues, issue)
    96  	reports[issue.Source] = reportIssues
    97  	reportMutex.Unlock()
    98  	return nil
    99  }
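
         // Illustrative sketch (not part of the original source): contributing a single issue. Unlike
         // ContributeIssuesMap, the source is taken from the issue itself, so Issue.Source must be set. The field
         // values below are hypothetical.
         //
         //	issue := report.Issue{
         //		Type:       "PodProblemsNotReported",
         //		Source:     "/tmp/cluster-snapshot",
         //		Summary:    "Pods in namespace verrazzano-system are not in a Ready state",
         //		Confidence: 5,
         //		Impact:     5,
         //	}
         //	if err := report.ContributeIssue(logger, issue); err != nil {
         //		logger.Errorf("Failed to contribute issue: %v", err)
         //	}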
   100  
    101  // GenerateHumanReport is a basic report generator
    102  // TODO: This is super basic for now; we need to do things like sort based on Confidence, add other output formats, etc.
    103  // Also add other niceties like the time, a summary of what was analyzed, and an indication when no issues were found.
   104  func GenerateHumanReport(log *zap.SugaredLogger, vzHelper helpers.VZHelper, reportCtx helpers.ReportCtx) (err error) {
    105  	// Default to stdout if no report file is supplied
    106  	// TODO: Eventually add support for other reportFormat types (e.g., JSON)
   107  	writeOut, writeSummaryOut, sepOut := "", "", ""
   108  
   109  	// Lock the report data while generating the report itself
    110  	reportMutex.Lock()
    111  	defer reportMutex.Unlock()
    112  	sourcesWithoutIssues := make(map[string]string, len(allSourcesAnalyzed)) // copy so the deletes below do not mutate allSourcesAnalyzed
         	for k, v := range allSourcesAnalyzed {
         		sourcesWithoutIssues[k] = v
         	}
   113  	for source, reportIssues := range reports {
   114  		log.Debugf("Will report on %d issues that were reported for %s", len(reportIssues), source)
    115  		// We need to filter the list of Issues that will be reported
    116  		// TODO: Need to sort them as well eventually
   117  		actuallyReported := filterReportIssues(log, reportIssues, reportCtx.IncludeInfo, reportCtx.MinConfidence, reportCtx.MinImpact)
   118  		if len(actuallyReported) == 0 {
   119  			log.Debugf("No issues to report for source: %s", source)
   120  			continue
   121  		}
   122  
   123  		// Print the Source as it has issues
   124  		delete(sourcesWithoutIssues, source)
   125  		var issuesDetected string
   126  		if helpers.GetIsLiveCluster() {
   127  			issuesDetected = fmt.Sprintf("Detected %d issues in the cluster:", len(actuallyReported))
   128  		} else {
   129  			issuesDetected = fmt.Sprintf("Detected %d issues for %s:", len(actuallyReported), source)
   130  		}
   131  		sepOut = "\n" + issuesDetected + "\n" + strings.Repeat(constants.LineSeparator, len(issuesDetected)) + "\n"
   132  		for _, issue := range actuallyReported {
    133  			// Display only the summary and actions when the report format is set to summary
   134  			if reportCtx.ReportFormat == constants.SummaryReport {
   135  				writeSummaryOut += fmt.Sprintf("\n\tISSUE (%s): %s\n", issue.Type, issue.Summary)
   136  				for _, action := range issue.Actions {
   137  					writeSummaryOut += fmt.Sprintf("\t%s\n", action.Summary)
   138  				}
   139  
   140  			}
   141  			writeOut += fmt.Sprintf("\n\tISSUE (%s)\n\t\tsummary: %s\n", issue.Type, issue.Summary)
   142  			if len(issue.Actions) > 0 && reportCtx.IncludeActions {
   143  				log.Debugf("Output actions")
   144  				writeOut += "\t\tactions:\n"
   145  				for _, action := range issue.Actions {
   146  					writeOut += fmt.Sprintf("\t\t\taction: %s\n", action.Summary)
   147  					if len(action.Steps) > 0 {
   148  						writeOut += "\t\t\t\tSteps:\n"
   149  						for i, step := range action.Steps {
   150  							writeOut += fmt.Sprintf("\t\t\t\t\tStep %d: %s\n", i+1, step)
   151  						}
   152  					}
   153  					if len(action.Links) > 0 {
   154  						writeOut += "\t\t\t\tLinks:\n"
   155  						for _, link := range action.Links {
   156  							writeOut += fmt.Sprintf("\t\t\t\t\t%s\n", link)
   157  						}
   158  					}
   159  				}
   160  			}
   161  			if len(issue.SupportingData) > 0 && reportCtx.IncludeSupportData {
   162  				log.Debugf("Output supporting data")
   163  				writeOut += "\t\tsupportingData:\n"
   164  				for _, data := range issue.SupportingData {
   165  					if len(data.Messages) > 0 {
   166  						writeOut += "\t\t\tmessages:\n"
   167  						for _, message := range data.Messages {
   168  							writeOut += fmt.Sprintf("\t\t\t\t%s\n", message)
   169  						}
   170  					}
   171  					if len(data.TextMatches) > 0 {
   172  						writeOut += "\t\t\tsearch matches:\n"
   173  						for _, match := range data.TextMatches {
   174  							if helpers.GetIsLiveCluster() {
    175  								writeOut += fmt.Sprintf("\t\t\t\t%s: %s\n", match.FileName, match.MatchedText)
   176  							} else {
   177  								writeOut += fmt.Sprintf("\t\t\t\t%s:%d: %s\n", match.FileName, match.FileLine, match.MatchedText)
   178  							}
   179  						}
   180  					}
   181  					if len(data.JSONPaths) > 0 {
   182  						writeOut += "\t\t\trelated json:\n"
   183  						for _, path := range data.JSONPaths {
   184  							writeOut += fmt.Sprintf("\t\t\t\t%s: %s\n", path.File, path.Path)
   185  						}
   186  					}
   187  					if len(data.RelatedFiles) > 0 {
   188  						writeOut += "\t\t\trelated resource(s):\n"
   189  						for _, fileName := range data.RelatedFiles {
   190  							writeOut += fmt.Sprintf("\t\t\t\t%s\n", fileName)
   191  						}
   192  					}
   193  				}
   194  			}
   195  		}
   196  	}
   197  
    198  	// genTmpReport creates the report file at a temporary path
   199  	genTmpReport := func(reportFile *string) (*os.File, error) {
   200  		*reportFile = constants.VzAnalysisReportTmpFile
   201  		repFile, err := os.CreateTemp(".", *reportFile)
   202  		if err != nil && (errors.Is(err, fs.ErrPermission) || strings.Contains(err.Error(), constants.ReadOnly)) {
    203  			fmt.Fprintf(vzHelper.GetOutputStream(), "Warning: %v: could not create the report file in the current directory, falling back to the system temp directory\n", err)
   204  			repFile, err = os.CreateTemp("", *reportFile)
   205  		}
   206  		return repFile, err
   207  	}
   208  
    209  	// genUsrDefinedReport creates the report file at the user-defined path
   210  	genUsrDefinedReport := func(reportFile *string) (*os.File, error) {
   211  		return os.OpenFile(*reportFile, os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0644)
   212  	}
   213  
    214  	// printReport writes the report to the console output stream
   215  	printReport := func() {
   216  		if reportCtx.ReportFormat == constants.DetailedReport {
    217  			fmt.Fprint(vzHelper.GetOutputStream(), sepOut+writeOut+version.GetVZCLIVersionMessageString())
    218  		} else if reportCtx.ReportFormat == constants.SummaryReport {
    219  			fmt.Fprint(vzHelper.GetOutputStream(), sepOut+writeSummaryOut+version.GetVZCLIVersionMessageString())
   220  		}
   221  	}
   222  
   223  	if len(writeOut) > 0 {
   224  		var repFile *os.File
   225  		defer func() {
   226  			if repFile != nil {
    227  				// Resolve an absolute path so the message is correct even when the file was created in the system temp directory
    228  				fullPath, pathErr := filepath.Abs(repFile.Name())
         				if pathErr != nil {
         					fullPath = repFile.Name()
         				}
    229  				fmt.Fprintf(os.Stdout, "\nDetailed analysis report available in %s\n", fullPath)
   230  				repFile.Close()
   231  			}
   232  		}()
   233  		if reportCtx.ReportFile == "" {
   234  			// flag --report-file is unset or empty
   235  			repFile, err = genTmpReport(&reportCtx.ReportFile)
   236  		} else {
   237  			// flag --report-file is set
   238  			repFile, err = genUsrDefinedReport(&reportCtx.ReportFile)
   239  		}
   240  		if err != nil {
    241  			log.Errorf("Failed to create report file %s: %s", reportCtx.ReportFile, err.Error())
   242  			return err
   243  		}
    244  		// Write the vz & K8s versions, a separator, and the detected issues to the report file
   245  		_, err = repFile.Write([]byte(helpers.GetVersionOut() + sepOut + writeOut + version.GetVZCLIVersionMessageString()))
   246  		if reportCtx.PrintReportToConsole {
   247  			printReport()
   248  		}
   249  		if err != nil {
    250  			log.Errorf("Failed to write to report file %s: %s", reportCtx.ReportFile, err.Error())
   251  			return err
   252  		}
   253  	} else {
   254  		if reportCtx.IncludeInfo {
   255  			if len(sourcesWithoutIssues) > 0 {
   256  				writeOut += "\n\n"
   257  			}
   258  			if len(sourcesWithoutIssues) == 1 {
   259  				// This is a workaround to avoid printing the source when analyzing the live cluster, although it impacts the
   260  				// regular use case with a directory containing a single cluster snapshot
   261  				writeOut += "Verrazzano analysis CLI did not detect any issue in the cluster\n"
   262  			} else {
   263  				for _, source := range sourcesWithoutIssues {
   264  					writeOut += fmt.Sprintf("Verrazzano analysis CLI did not detect any issue in %s\n", source)
   265  				}
   266  			}
   267  			writeOut += version.GetVZCLIVersionMessageString()
    268  			fmt.Fprint(vzHelper.GetOutputStream(), writeOut)
   269  		}
   270  	}
   271  	return nil
   272  }
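
         // Illustrative sketch (not part of the original source): how a caller might drive report generation after
         // analyzers have contributed issues. The ReportCtx values are hypothetical; only fields referenced in this
         // file are shown.
         //
         //	reportCtx := helpers.ReportCtx{
         //		ReportFile:           "",                       // empty: create a temporary report file
         //		ReportFormat:         constants.DetailedReport, // or constants.SummaryReport
         //		IncludeInfo:          true,
         //		IncludeActions:       true,
         //		IncludeSupportData:   true,
         //		MinConfidence:        0,
         //		MinImpact:            0,
         //		PrintReportToConsole: true,
         //	}
         //	if err := report.GenerateHumanReport(logger, vzHelper, reportCtx); err != nil {
         //		logger.Errorf("Report generation failed: %v", err)
         //	}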
   273  
   274  // AddSourceAnalyzed tells the report which sources have been analyzed. This way it knows
   275  // the entire set of sources which were analyzed (not just the ones which had issues detected)
   276  func AddSourceAnalyzed(source string) {
   277  	reportMutex.Lock()
   278  	allSourcesAnalyzed[source] = source
   279  	reportMutex.Unlock()
   280  }
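
         // Illustrative sketch (not part of the original source): analyzers record every root they examine, even when
         // no issues are found, so the report can state that a source was analyzed cleanly.
         //
         //	report.AddSourceAnalyzed("/tmp/cluster-snapshot")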
   281  
    282  // GetAllSourcesFilteredIssues is exported only so that unit tests can inspect the issues found in a report
   283  func GetAllSourcesFilteredIssues(log *zap.SugaredLogger, includeInfo bool, minConfidence int, minImpact int) (filtered []Issue) {
   284  	reportMutex.Lock()
   285  	for _, reportIssues := range reports {
   286  		subFiltered := filterReportIssues(log, reportIssues, includeInfo, minConfidence, minImpact)
   287  		if len(subFiltered) > 0 {
   288  			filtered = append(filtered, subFiltered...)
   289  		}
   290  	}
   291  	reportMutex.Unlock()
   292  	return filtered
   293  }
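
         // Illustrative sketch (not part of the original source): a unit test might contribute test issues and then
         // inspect the filtered result (the logger and assertion helper shown are hypothetical).
         //
         //	issues := report.GetAllSourcesFilteredIssues(logger, true, 0, 0)
         //	assert.NotEmpty(t, issues)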
   294  
   295  // ClearReports clears the reports map, only for unit tests
   296  func ClearReports() {
   297  	reportMutex.Lock()
   298  	reports = make(map[string][]Issue)
   299  	reportMutex.Unlock()
   300  }
   301  
    302  // isEqualStructs reports whether two structs are deeply equal
   303  func isEqualStructs(s1, s2 any) bool {
   304  	return reflect.DeepEqual(s1, s2)
   305  }
   306  
    307  // deDuplicateIssues filters out duplicate issues
   308  func deDuplicateIssues(reportIssues []Issue) []Issue {
   309  	var deDuplicates = make([]Issue, 0, len(reportIssues))
   310  	for _, i1 := range reportIssues {
   311  		issueVisited := false
   312  		for _, i2 := range deDuplicates {
   313  			if isEqualStructs(i1, i2) {
   314  				issueVisited = true
   315  				break
   316  			}
   317  		}
   318  		if !issueVisited {
   319  			deDuplicates = append(deDuplicates, i1)
   320  		}
   321  	}
   322  	return deDuplicates
   323  }
   324  
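         // filterReportIssues drops issues that are informational (when includeInfo is false) or that fall below the
         // minimum confidence/impact thresholds, and then de-duplicates the remaining issues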
   325  func filterReportIssues(log *zap.SugaredLogger, reportIssues []Issue, includeInfo bool, minConfidence int, minImpact int) (filtered []Issue) {
   326  	filtered = make([]Issue, 0, len(reportIssues))
   327  	for _, issue := range reportIssues {
    328  		// Skip issues that are informational (unless includeInfo is set) or below the minimum confidence/impact thresholds
    329  		if (issue.Informational && !includeInfo) || issue.Confidence < minConfidence || issue.Impact < minImpact {
   330  			log.Debugf("Skipping issue %s based on informational/confidence/impact settings", issue.Summary)
   331  			continue
   332  		}
   333  		filtered = append(filtered, issue)
   334  	}
   335  	return deDuplicateIssues(filtered)
   336  }