github.com/zppinho/prow@v0.0.0-20240510014325-1738badeb017/pkg/spyglass/lenses/metadata/lens_test.go

/*
Copyright 2018 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package metadata

import (
	"encoding/json"
	"reflect"
	"strings"
	"testing"

	"github.com/google/go-cmp/cmp"

	v1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	prowv1 "sigs.k8s.io/prow/pkg/apis/prowjobs/v1"
	"sigs.k8s.io/prow/pkg/config"

	k8sreporter "sigs.k8s.io/prow/pkg/crier/reporters/gcs/kubernetes"
	"sigs.k8s.io/prow/pkg/spyglass/api"
	"sigs.k8s.io/prow/pkg/spyglass/lenses"
	"sigs.k8s.io/prow/pkg/spyglass/lenses/fake"
)

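// FakeArtifact aliases the fake lens artifact type so test fixtures can be
// declared as simple in-memory path/content pairs.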
type FakeArtifact = fake.Artifact

// TestCheckTimestamps checks the way in which the started.json and
// finished.json files affect the view. For example, a negative duration should
// result in a warning for the user.
func TestCheckTimestamps(t *testing.T) {
	startedJson := &FakeArtifact{
		Path:    "started.json",
		Content: []byte(`{"timestamp":1676610469}`),
	}
	// This timestamp is *after* the one in startedJson. This is the happy path.
	finishedJsonNormal := &FakeArtifact{
		Path:    "finished.json",
		Content: []byte(`{"timestamp":1676611469,"passed":true,"result":"SUCCESS"}`),
	}
	// This timestamp is *before* the one in startedJson.
	finishedJsonNegative := &FakeArtifact{
		Path:    "finished.json",
		Content: []byte(`{"timestamp":1671827322,"passed":true,"result":"SUCCESS"}`),
	}
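	// The expected durations below follow from these fixtures:
	// 1676611469-1676610469 = 1000s (16m40s), and
	// 1671827322-1676610469 = -4783147s (-1328h39m7s).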
	// NOTE: We cannot check for human-readable timestamps because the timezone
	// can differ from a local execution of this test (e.g., PST) versus the
	// timezone used in CI (e.g., UTC). So we make sure to avoid
	// timezone-dependent strings in these test cases.
	testCases := []struct {
		name               string
		artifacts          []api.Artifact
		expectedSubstrings []string
		err                error
	}{
		{
			name: "regular (positive) duration",
			artifacts: []api.Artifact{
				startedJson, finishedJsonNormal,
			},
			expectedSubstrings: []string{`Test started`, `after 16m40s`, `more info`},
			err:                nil,
		},
		{
			name: "negative duration triggers user-facing warning",
			artifacts: []api.Artifact{
				startedJson, finishedJsonNegative,
			},
			expectedSubstrings: []string{`WARNING: The elapsed duration (-1328h39m7s) is negative. This can be caused by another process outside of Prow writing into the finished.json file. The file currently has a completion time of`},
			err:                nil,
		},
	}
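	// Render the lens body for each case and assert that the expected
	// substrings appear in the rendered output.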
	for _, tc := range testCases {
		lens, err := lenses.GetLens("metadata")
		if tc.err != err {
			t.Errorf("%s: expected error %v but got %v", tc.name, tc.err, err)
			continue
		}
		if tc.err == nil && lens == nil {
			t.Fatal("Expected lens 'metadata' but got nil.")
		}
		got := lens.Body(tc.artifacts, "", "", nil, config.Spyglass{})
		for _, expectedSubstring := range tc.expectedSubstrings {
			if !strings.Contains(got, expectedSubstring) {
				t.Errorf("%s: failed to find expected substring %q in %q", tc.name, expectedSubstring, got)
			}
		}
	}
}

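// TestFlattenMetadata checks that nested metadata maps are flattened into
// dot-separated keys and that non-string values are dropped.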
func TestFlattenMetadata(t *testing.T) {
	tests := []struct {
		name        string
		metadata    map[string]interface{}
		expectedMap map[string]string
	}{
		{
			name:        "Empty map",
			metadata:    map[string]interface{}{},
			expectedMap: map[string]string{},
		},
		{
			name: "Test metadata",
			metadata: map[string]interface{}{
				"field1": "value1",
				"field2": "value2",
				"field3": "value3",
			},
			expectedMap: map[string]string{
				"field1": "value1",
				"field2": "value2",
				"field3": "value3",
			},
		},
		{
			name: "Test metadata with non-strings",
			metadata: map[string]interface{}{
				"field1": "value1",
				"field2": 2,
				"field3": true,
				"field4": "value4",
			},
			expectedMap: map[string]string{
				"field1": "value1",
				"field4": "value4",
			},
		},
		{
			name: "Test nested metadata",
			metadata: map[string]interface{}{
				"field1": "value1",
				"field2": "value2",
				"field3": map[string]interface{}{
					"nest1-field1": "nest1-value1",
					"nest1-field2": "nest1-value2",
					"nest1-field3": map[string]interface{}{
						"nest2-field1": "nest2-value1",
						"nest2-field2": "nest2-value2",
					},
				},
				"field4": "value4",
			},
			expectedMap: map[string]string{
				"field1":                           "value1",
				"field2":                           "value2",
				"field3.nest1-field1":              "nest1-value1",
				"field3.nest1-field2":              "nest1-value2",
				"field3.nest1-field3.nest2-field1": "nest2-value1",
				"field3.nest1-field3.nest2-field2": "nest2-value2",
				"field4":                           "value4",
			},
		},
	}

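	// Exercise flattenMetadata on a zero-value Lens and compare each result
	// against the expected flat map.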
	lens := Lens{}
	for _, test := range tests {
		flattenedMetadata := lens.flattenMetadata(test.metadata)
		if !reflect.DeepEqual(flattenedMetadata, test.expectedMap) {
			t.Errorf("%s: resulting map did not match expected map (-got +want):\n%s", test.name, cmp.Diff(flattenedMetadata, test.expectedMap))
		}
	}
}

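// TestHintFromPodInfo checks the user-facing hints derived from the pod
// report: image pull failures, missing volumes, scheduling problems,
// unhealthy nodes, and stuck init containers.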
func TestHintFromPodInfo(t *testing.T) {
	tests := []struct {
		name     string
		info     k8sreporter.PodReport
		expected string
	}{
		{
			name: "normal failed run has no output",
			info: k8sreporter.PodReport{
				Pod: &v1.Pod{
					ObjectMeta: metav1.ObjectMeta{
						Name: "8ef160fc-46b6-11ea-a907-1a9873703b03",
					},
					Spec: v1.PodSpec{
						Containers: []v1.Container{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
							},
						},
					},
					Status: v1.PodStatus{
						Phase: v1.PodFailed,
						ContainerStatuses: []v1.ContainerStatus{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
								Ready: false,
								State: v1.ContainerState{
									Terminated: &v1.ContainerStateTerminated{
										ExitCode: 1,
										Reason:   "Completed",
									},
								},
							},
						},
					},
				},
			},
		},
		{
			name:     "stuck images are reported by name",
			expected: `The test container could not start because it could not pull "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master". Check your images. Full message: "rpc error: code = Unknown desc"`,
			info: k8sreporter.PodReport{
				Pod: &v1.Pod{
					ObjectMeta: metav1.ObjectMeta{
						Name: "8ef160fc-46b6-11ea-a907-1a9873703b03",
					},
					Spec: v1.PodSpec{
						Containers: []v1.Container{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
							},
						},
					},
					Status: v1.PodStatus{
						Phase: v1.PodPending,
						ContainerStatuses: []v1.ContainerStatus{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
								Ready: false,
								State: v1.ContainerState{
									Waiting: &v1.ContainerStateWaiting{
										Reason:  "ImagePullBackOff",
										Message: "rpc error: code = Unknown desc",
									},
								},
							},
						},
					},
				},
			},
		},
		{
			name:     "stuck images are reported by name - errimagepull",
			expected: `The test container could not start because it could not pull "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master". Check your images. Full message: "rpc error: code = Unknown desc"`,
			info: k8sreporter.PodReport{
				Pod: &v1.Pod{
					ObjectMeta: metav1.ObjectMeta{
						Name: "8ef160fc-46b6-11ea-a907-1a9873703b03",
					},
					Spec: v1.PodSpec{
						Containers: []v1.Container{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
							},
						},
					},
					Status: v1.PodStatus{
						Phase: v1.PodPending,
						ContainerStatuses: []v1.ContainerStatus{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
								Ready: false,
								State: v1.ContainerState{
									Waiting: &v1.ContainerStateWaiting{
										Reason:  "ErrImagePull",
										Message: "rpc error: code = Unknown desc",
									},
								},
							},
						},
					},
				},
			},
		},
		{
			name:     "stuck volumes are reported by name",
			expected: `The pod could not start because it could not mount the volume "some-volume": secrets "no-such-secret" not found`,
			info: k8sreporter.PodReport{
				Pod: &v1.Pod{
					ObjectMeta: metav1.ObjectMeta{
						Name: "8ef160fc-46b6-11ea-a907-1a9873703b03",
					},
					Spec: v1.PodSpec{
						Containers: []v1.Container{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
								VolumeMounts: []v1.VolumeMount{
									{
										Name:      "some-volume",
										MountPath: "/mnt/some-volume",
									},
								},
							},
						},
						Volumes: []v1.Volume{
							{
								Name: "some-volume",
								VolumeSource: v1.VolumeSource{
									Secret: &v1.SecretVolumeSource{
										SecretName: "no-such-secret",
									},
								},
							},
						},
					},
					Status: v1.PodStatus{
						Phase: v1.PodPending,
						ContainerStatuses: []v1.ContainerStatus{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
								Ready: false,
								State: v1.ContainerState{
									Waiting: &v1.ContainerStateWaiting{
										Reason: "ContainerCreating",
									},
								},
							},
						},
					},
				},
				Events: []v1.Event{
					{
						Type:    "Warning",
						Reason:  "FailedMount",
						Message: `MountVolume.SetUp failed for volume "some-volume" : secrets "no-such-secret" not found`,
					},
				},
			},
		},
		{
			name:     "pod scheduled to an illegal node is reported",
			expected: "The job could not start because it was scheduled to a node that does not satisfy its NodeSelector",
			info: k8sreporter.PodReport{
				Pod: &v1.Pod{
					ObjectMeta: metav1.ObjectMeta{
						Name: "8ef160fc-46b6-11ea-a907-1a9873703b03",
					},
					Spec: v1.PodSpec{
						Containers: []v1.Container{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
							},
						},
					},
					Status: v1.PodStatus{
						Phase:  v1.PodFailed,
						Reason: "MatchNodeSelector",
					},
				},
			},
		},
		{
			name:     "pod that could not be scheduled is reported",
			expected: "There are no nodes that your pod can schedule to - check your requests, tolerations, and node selectors (0/3 nodes are available: 3 node(s) didn't match node selector.)",
			info: k8sreporter.PodReport{
				Pod: &v1.Pod{
					ObjectMeta: metav1.ObjectMeta{
						Name: "8ef160fc-46b6-11ea-a907-1a9873703b03",
					},
					Spec: v1.PodSpec{
						Containers: []v1.Container{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
							},
						},
					},
					Status: v1.PodStatus{
						Phase: v1.PodPending,
					},
				},
				Events: []v1.Event{
					{
						Type:    "Warning",
						Reason:  "FailedScheduling",
						Message: "0/3 nodes are available: 3 node(s) didn't match node selector.",
					},
				},
			},
		},
		{
			name:     "apparent node failure is reported as such",
			expected: "The job may have executed on an unhealthy node. Contact your prow maintainers with a link to this page or check the detailed pod information.",
			info: k8sreporter.PodReport{
				Pod: &v1.Pod{
					ObjectMeta: metav1.ObjectMeta{
						Name: "8ef160fc-46b6-11ea-a907-1a9873703b03",
					},
					Spec: v1.PodSpec{
						Containers: []v1.Container{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
							},
						},
					},
					Status: v1.PodStatus{
						Phase: v1.PodPending,
						ContainerStatuses: []v1.ContainerStatus{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
								Ready: false,
								State: v1.ContainerState{
									Waiting: &v1.ContainerStateWaiting{
										Reason: "ContainerCreating",
									},
								},
							},
						},
					},
				},
				Events: []v1.Event{
					{
						Type:   "Warning",
						Reason: "FailedCreatePodSandbox",
					},
				},
			},
		},
		{
			name:     "init container failed to start",
			expected: "Init container initupload not ready: (state: terminated, reason: \"Error\", message: \"failed fetching oauth2 token\")",
			info: k8sreporter.PodReport{
				Pod: &v1.Pod{
					ObjectMeta: metav1.ObjectMeta{
						Name: "8ef160fc-46b6-11ea-a907-1a9873703b03",
					},
					Spec: v1.PodSpec{
						Containers: []v1.Container{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
							},
						},
					},
					Status: v1.PodStatus{
						Phase: v1.PodPending,
						InitContainerStatuses: []v1.ContainerStatus{
							{
								Name:  "initupload",
								Ready: false,
								State: v1.ContainerState{
									Terminated: &v1.ContainerStateTerminated{
										Reason:  "Error",
										Message: "failed fetching oauth2 token",
									},
								},
							},
						},
						ContainerStatuses: []v1.ContainerStatus{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
								Ready: false,
								State: v1.ContainerState{
									Waiting: &v1.ContainerStateWaiting{
										Reason: "PodInitializing",
									},
								},
							},
						},
					},
				},
			},
		},
		{
			name:     "init container running but not ready",
			expected: "Init container initupload not ready: (state: running)",
			info: k8sreporter.PodReport{
				Pod: &v1.Pod{
					ObjectMeta: metav1.ObjectMeta{
						Name: "8ef160fc-46b6-11ea-a907-1a9873703b03",
					},
					Spec: v1.PodSpec{
						Containers: []v1.Container{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
							},
						},
					},
					Status: v1.PodStatus{
						Phase: v1.PodPending,
						InitContainerStatuses: []v1.ContainerStatus{
							{
								Name:  "initupload",
								Ready: false,
								State: v1.ContainerState{
									Running: &v1.ContainerStateRunning{},
								},
							},
						},
						ContainerStatuses: []v1.ContainerStatus{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
								Ready: false,
								State: v1.ContainerState{
									Waiting: &v1.ContainerStateWaiting{
										Reason: "PodInitializing",
									},
								},
							},
						},
					},
				},
			},
		},
		{
			name:     "multiple init containers failed to start",
			expected: "Init container entrypoint not ready: (state: waiting, reason: \"PodInitializing\", message: \"\")\nInit container initupload not ready: (state: terminated, reason: \"Error\", message: \"failed fetching oauth2 token\")",
			info: k8sreporter.PodReport{
				Pod: &v1.Pod{
					ObjectMeta: metav1.ObjectMeta{
						Name: "8ef160fc-46b6-11ea-a907-1a9873703b03",
					},
					Spec: v1.PodSpec{
						Containers: []v1.Container{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
							},
						},
					},
					Status: v1.PodStatus{
						Phase: v1.PodPending,
						InitContainerStatuses: []v1.ContainerStatus{
							{
								Name:  "entrypoint",
								Ready: false,
								State: v1.ContainerState{
									Waiting: &v1.ContainerStateWaiting{
										Reason:  "PodInitializing",
										Message: "",
									},
								},
							},
							{
								Name:  "initupload",
								Ready: false,
								State: v1.ContainerState{
									Terminated: &v1.ContainerStateTerminated{
										Reason:  "Error",
										Message: "failed fetching oauth2 token",
									},
								},
							},
						},
						ContainerStatuses: []v1.ContainerStatus{
							{
								Name:  "test",
								Image: "gcr.io/k8s-staging-test-infra/kubekins-e2e:latest-master",
								Ready: false,
								State: v1.ContainerState{
									Waiting: &v1.ContainerStateWaiting{
										Reason: "PodInitializing",
									},
								},
							},
						},
					},
				},
			},
		},
	}

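	// hintFromPodInfo takes the JSON-serialized PodReport, so marshal each
	// fixture before calling it.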
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			b, err := json.Marshal(tc.info)
			if err != nil {
				t.Fatalf("Unexpectedly failed to marshal pod to JSON (this wasn't even part of the test!): %v", err)
			}
			result := hintFromPodInfo(b)
			if result != tc.expected {
				t.Errorf("Expected hint %q, but got %q", tc.expected, result)
			}
		})
	}
}

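// TestHintFromProwJob checks that only ProwJobs in the error state produce a
// hint containing their status description.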
func TestHintFromProwJob(t *testing.T) {
	tests := []struct {
		name            string
		expected        string
		expectedErrored bool
		pj              prowv1.ProwJob
	}{
		{
			name:            "errored job has its description reported",
			expected:        "Job execution failed: this is the description",
			expectedErrored: true,
			pj: prowv1.ProwJob{
				Status: prowv1.ProwJobStatus{
					State:       prowv1.ErrorState,
					Description: "this is the description",
				},
			},
		},
		{
			name:     "failed prowjob reports nothing",
			expected: "",
			pj: prowv1.ProwJob{
				Status: prowv1.ProwJobStatus{
					State:       prowv1.FailureState,
					Description: "this is another description",
				},
			},
		},
		{
			name:     "aborted prowjob reports nothing",
			expected: "",
			pj: prowv1.ProwJob{
				Status: prowv1.ProwJobStatus{
					State:       prowv1.AbortedState,
					Description: "this is another description",
				},
			},
		},
		{
			name:     "successful prowjob reports nothing",
			expected: "",
			pj: prowv1.ProwJob{
				Status: prowv1.ProwJobStatus{
					State:       prowv1.SuccessState,
					Description: "this is another description",
				},
			},
		},
		{
			name:     "pending prowjob reports nothing",
			expected: "",
			pj: prowv1.ProwJob{
				Status: prowv1.ProwJobStatus{
					State:       prowv1.PendingState,
					Description: "this is another description",
				},
			},
		},
		{
			name:     "triggered prowjob reports nothing",
			expected: "",
			pj: prowv1.ProwJob{
				Status: prowv1.ProwJobStatus{
					State:       prowv1.TriggeredState,
					Description: "this is another description",
				},
			},
		},
	}

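	// hintFromProwJob likewise takes the serialized ProwJob and additionally
	// reports whether the job ended in the error state.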
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			b, err := json.Marshal(tc.pj)
			if err != nil {
				t.Fatalf("Unexpectedly failed to marshal prowjob to JSON (this wasn't even part of the test!): %v", err)
			}
			result, errored := hintFromProwJob(b)
			if result != tc.expected {
				t.Errorf("Expected hint %q, but got %q", tc.expected, result)
			}
			if errored != tc.expectedErrored {
				t.Errorf("Expected errored to be %t, but got %t", tc.expectedErrored, errored)
			}
		})
	}
}