// Copyright 2022 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.

package dm

import (
	"context"
	"encoding/json"
	"fmt"
	"sort"
	"strings"
	"testing"
	"time"

	"github.com/DATA-DOG/go-sqlmock"
	"github.com/pingcap/log"
	"github.com/pingcap/tiflow/dm/checker"
	dmconfig "github.com/pingcap/tiflow/dm/config"
	"github.com/pingcap/tiflow/dm/master"
	"github.com/pingcap/tiflow/dm/pb"
	"github.com/pingcap/tiflow/dm/pkg/conn"
	frameModel "github.com/pingcap/tiflow/engine/framework/model"
	"github.com/pingcap/tiflow/engine/jobmaster/dm/config"
	"github.com/pingcap/tiflow/engine/jobmaster/dm/metadata"
	"github.com/pingcap/tiflow/engine/jobmaster/dm/runtime"
	dmpkg "github.com/pingcap/tiflow/engine/pkg/dm"
	resModel "github.com/pingcap/tiflow/engine/pkg/externalresource/model"
	kvmock "github.com/pingcap/tiflow/engine/pkg/meta/mock"
	"github.com/pingcap/tiflow/engine/pkg/promutil"
	"github.com/pingcap/tiflow/pkg/errors"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
)

// TestQueryStatusAPI exercises JobMaster.QueryJobStatus across a matrix of
// seven tasks covering the interesting states:
//   - task1: paused, no worker, no stored current unit status
//   - task2: finished, no worker, but has finished-unit history in metadata
//   - task3: worker online but the status request times out
//   - task4: running dump unit (stale config: worker revision 3 < job ModRevision 4)
//   - task5: running load unit that reports itself paused (up-to-date config)
//   - task6: running sync unit that reports an error result (stale config)
//   - task7: finished, no worker, with finished-unit history at the current revision
//
// The final assertion compares the JSON-marshaled job status against a golden
// string, so field order/naming of the status structs is pinned by this test.
func TestQueryStatusAPI(t *testing.T) {
	var (
		ctx               = context.Background()
		metaKVClient      = kvmock.NewMetaMock()
		mockBaseJobmaster = &MockBaseJobmaster{t: t}
		jm                = &JobMaster{
			BaseJobMaster: mockBaseJobmaster,
			metadata:      metadata.NewMetaData(metaKVClient, log.L()),
		}
		// ModRevision 4 is the "current" config revision; workers registered
		// below with revision 3 will show up as config_outdated in the output.
		jobCfg  = &config.JobCfg{ModRevision: 4}
		taskCfg = jobCfg.ToTaskCfg()
		job     = &metadata.Job{
			Tasks: map[string]*metadata.Task{
				"task1": {Stage: metadata.StagePaused, Cfg: taskCfg},
				"task2": {Stage: metadata.StageFinished, Cfg: taskCfg},
				"task3": {Stage: metadata.StageFinished, Cfg: taskCfg},
				"task4": {Stage: metadata.StageRunning, Cfg: taskCfg},
				"task5": {Stage: metadata.StageRunning, Cfg: taskCfg},
				"task6": {Stage: metadata.StageRunning, Cfg: taskCfg},
				"task7": {Stage: metadata.StageFinished, Cfg: taskCfg},
			},
		}
		// Per-unit status payloads; these are JSON-marshaled and echoed back
		// verbatim inside the golden expectedStatus string below.
		dumpStatus = &pb.DumpStatus{
			TotalTables:       10,
			CompletedTables:   1,
			FinishedBytes:     100,
			FinishedRows:      10,
			EstimateTotalRows: 1000,
			Bps:               1000,
			Progress:          "20.00 %",
		}
		loadStatus = &pb.LoadStatus{
			FinishedBytes:  4,
			TotalBytes:     100,
			Progress:       "4%",
			MetaBinlog:     "mysql-bin.000002, 8",
			MetaBinlogGTID: "1-2-3",
			Bps:            1000,
		}
		syncStatus = &pb.SyncStatus{
			TotalRows:           10,
			TotalRps:            10,
			RecentRps:           10,
			MasterBinlog:        "mysql-bin.000002, 4",
			MasterBinlogGtid:    "1-2-20",
			SyncerBinlog:        "mysql-bin.000001, 432",
			SyncerBinlogGtid:    "1-2-10",
			BlockingDDLs:        []string{"ALTER TABLE db.tb ADD COLUMN a INT"},
			Synced:              false,
			BinlogType:          "remote",
			SecondsBehindMaster: 10,
			BlockDDLOwner:       "",
			ConflictMsg:         "",
		}
		processError = &dmpkg.ProcessError{
			ErrCode:    1,
			ErrClass:   "class",
			ErrScope:   "scope",
			ErrLevel:   "low",
			Message:    "msg",
			RawCause:   "raw cause",
			Workaround: "workaround",
		}
		dumpStatusBytes, _ = json.Marshal(dumpStatus)
		loadStatusBytes, _ = json.Marshal(loadStatus)
		syncStatusBytes, _ = json.Marshal(syncStatus)
		// Canned responses the mocked message agent returns for task4/5/6.
		dumpStatusResp = &dmpkg.QueryStatusResponse{Unit: frameModel.WorkerDMDump, Stage: metadata.StageRunning, Status: dumpStatusBytes, IoTotalBytes: 0, DumpIoTotalBytes: 0}
		loadStatusResp = &dmpkg.QueryStatusResponse{Unit: frameModel.WorkerDMLoad, Stage: metadata.StagePaused, Result: &dmpkg.ProcessResult{IsCanceled: true}, Status: loadStatusBytes, IoTotalBytes: 0, DumpIoTotalBytes: 0}
		syncStatusResp = &dmpkg.QueryStatusResponse{Unit: frameModel.WorkerDMSync, Stage: metadata.StageError, Result: &dmpkg.ProcessResult{Errors: []*dmpkg.ProcessError{processError}}, Status: syncStatusBytes, IoTotalBytes: 0, DumpIoTotalBytes: 0}
		// Fixed historical timestamps so unit durations are comfortably > 24h
		// relative to "now" (asserted loosely in the loop further down).
		dumpTime, _  = time.Parse(time.RFC3339Nano, "2020-11-04T18:47:57.43382274+08:00")
		loadTime, _  = time.Parse(time.RFC3339Nano, "2020-11-04T19:47:57.43382274+08:00")
		syncTime, _  = time.Parse(time.RFC3339Nano, "2020-11-04T20:47:57.43382274+08:00")
		dumpDuration = time.Hour
		loadDuration = time.Minute
		unitState    = &metadata.UnitState{
			CurrentUnitStatus: map[string]*metadata.UnitStatus{
				// task1's worker not found, and current unit status is not stored
				// task2's worker not found
				"task2": {CreatedTime: syncTime},
				"task3": {CreatedTime: dumpTime},
				"task4": {CreatedTime: dumpTime},
				"task5": {CreatedTime: loadTime},
				"task6": {CreatedTime: syncTime},
				// task7's worker not found
				"task7": {CreatedTime: syncTime},
			},
			FinishedUnitStatus: map[string][]*metadata.FinishedTaskStatus{
				// task2 finished its dump and load units under an older config
				// revision (3); task7 under the current one (4).
				"task2": {
					&metadata.FinishedTaskStatus{
						TaskStatus: metadata.TaskStatus{
							Unit:             frameModel.WorkerDMDump,
							Task:             "task2",
							Stage:            metadata.StageFinished,
							CfgModRevision:   3,
							StageUpdatedTime: loadTime,
						},
						Status:   dumpStatusBytes,
						Duration: dumpDuration,
					},
					&metadata.FinishedTaskStatus{
						TaskStatus: metadata.TaskStatus{
							Unit:             frameModel.WorkerDMLoad,
							Task:             "task2",
							Stage:            metadata.StageFinished,
							CfgModRevision:   3,
							StageUpdatedTime: syncTime,
						},
						Status:   loadStatusBytes,
						Duration: loadDuration,
					},
				},
				"task7": {
					&metadata.FinishedTaskStatus{
						TaskStatus: metadata.TaskStatus{
							Unit:             frameModel.WorkerDMDump,
							Task:             "task7",
							Stage:            metadata.StageFinished,
							CfgModRevision:   4,
							StageUpdatedTime: loadTime,
						},
						Status:   dumpStatusBytes,
						Duration: dumpDuration,
					},
					&metadata.FinishedTaskStatus{
						TaskStatus: metadata.TaskStatus{
							Unit:             frameModel.WorkerDMLoad,
							Task:             "task7",
							Stage:            metadata.StageFinished,
							CfgModRevision:   4,
							StageUpdatedTime: syncTime,
						},
						Status:   loadStatusBytes,
						Duration: loadDuration,
					},
				},
			},
		}
	)
	messageAgent := &dmpkg.MockMessageAgent{}
	jm.messageAgent = messageAgent
	jm.workerManager = NewWorkerManager(mockBaseJobmaster.ID(), nil, jm.metadata.JobStore(), jm.metadata.UnitStateStore(), nil, nil, nil, jm.Logger(),
		resModel.ResourceTypeLocalFile)
	jm.taskManager = NewTaskManager("test-job", nil, nil, nil, jm.Logger(), promutil.NewFactory4Test(t.TempDir()))
	// Register online workers for task3-task6 and stub one status request per
	// worker. task3's request fails with DeadlineExceeded; the worker revision
	// (last argument) decides config_outdated: 4 == ModRevision (up to date),
	// 3 < ModRevision (outdated).
	jm.workerManager.UpdateWorkerStatus(runtime.NewWorkerStatus("task3", frameModel.WorkerDMDump, "worker3", runtime.WorkerOnline, 4))
	messageAgent.On("SendRequest", mock.Anything, "task3", mock.Anything, mock.Anything).Return(nil, context.DeadlineExceeded).Once()
	jm.workerManager.UpdateWorkerStatus(runtime.NewWorkerStatus("task4", frameModel.WorkerDMDump, "worker4", runtime.WorkerOnline, 3))
	messageAgent.On("SendRequest", mock.Anything, "task4", mock.Anything, mock.Anything).Return(dumpStatusResp, nil).Once()
	jm.workerManager.UpdateWorkerStatus(runtime.NewWorkerStatus("task5", frameModel.WorkerDMLoad, "worker5", runtime.WorkerOnline, 4))
	messageAgent.On("SendRequest", mock.Anything, "task5", mock.Anything, mock.Anything).Return(loadStatusResp, nil).Once()
	jm.workerManager.UpdateWorkerStatus(runtime.NewWorkerStatus("task6", frameModel.WorkerDMSync, "worker6", runtime.WorkerOnline, 3))
	messageAgent.On("SendRequest", mock.Anything, "task6", mock.Anything, mock.Anything).Return(syncStatusResp, nil).Once()

	err := jm.metadata.UnitStateStore().Put(ctx, unitState)
	require.NoError(t, err)

	// no job
	jobStatus, err := jm.QueryJobStatus(context.Background(), nil)
	require.Error(t, err)
	require.Nil(t, jobStatus)

	require.NoError(t, jm.metadata.JobStore().Put(context.Background(), job))

	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()

	// Querying an unknown task yields a per-task error, not a call failure.
	jobStatus, err = jm.QueryJobStatus(ctx, []string{"task8"})
	require.NoError(t, err)
	taskStatus := jobStatus.TaskStatus["task8"]
	require.Equal(t, "", taskStatus.WorkerID)
	require.Equal(t, "", taskStatus.ExpectedStage.String())
	require.Equal(t, &dmpkg.QueryStatusResponse{ErrorMsg: "task task8 for job not found"}, taskStatus.Status)

	// nil task list means "all tasks".
	jobStatus, err = jm.QueryJobStatus(ctx, nil)
	require.NoError(t, err)
	require.Len(t, jobStatus.TaskStatus, 7)

	for task, currentStatus := range jobStatus.TaskStatus {
		// start-time is fixed at 2020-11-04 except task1 which is paused and don't have current status,
		// we just check that it's > 24h (now it's 2022-12-16)
		if task != "task1" {
			require.Greater(t, currentStatus.Duration, 24*time.Hour)
		}
		// this is for passing follow test, because we can't offer the precise duration in advance
		currentStatus.Duration = time.Second
		jobStatus.TaskStatus[task] = currentStatus
	}

	// Golden JSON for the whole job status. "duration": 1000000000 is the
	// one-second placeholder written by the loop above.
	expectedStatus := `{
	"job_id": "dm-jobmaster-id",
	"task_status": {
		"task1": {
			"expected_stage": "Paused",
			"worker_id": "",
			"config_outdated": true,
			"status": {
				"error_message": "worker for task task1 not found",
				"unit": "",
				"stage": "",
				"result": null,
				"status": null,
				"io_total_bytes": 0,
				"dump_io_total_bytes": 0
			},
			"duration": 1000000000
		},
		"task2": {
			"expected_stage": "Finished",
			"worker_id": "",
			"config_outdated": true,
			"status": {
				"error_message": "worker for task task2 not found",
				"unit": "",
				"stage": "",
				"result": null,
				"status": null,
				"io_total_bytes": 0,
				"dump_io_total_bytes": 0
			},
			"duration": 1000000000
		},
		"task3": {
			"expected_stage": "Finished",
			"worker_id": "worker3",
			"config_outdated": false,
			"status": {
				"error_message": "context deadline exceeded",
				"unit": "",
				"stage": "",
				"result": null,
				"status": null,
				"io_total_bytes": 0,
				"dump_io_total_bytes": 0
			},
			"duration": 1000000000
		},
		"task4": {
			"expected_stage": "Running",
			"worker_id": "worker4",
			"config_outdated": true,
			"status": {
				"error_message": "",
				"unit": "DMDumpTask",
				"stage": "Running",
				"result": null,
				"status": {
					"totalTables": 10,
					"completedTables": 1,
					"finishedBytes": 100,
					"finishedRows": 10,
					"estimateTotalRows": 1000,
					"bps": 1000,
					"progress": "20.00 %"
				},
				"io_total_bytes": 0,
				"dump_io_total_bytes": 0
			},
			"duration": 1000000000
		},
		"task5": {
			"expected_stage": "Running",
			"worker_id": "worker5",
			"config_outdated": false,
			"status": {
				"error_message": "",
				"unit": "DMLoadTask",
				"stage": "Paused",
				"result": {
					"is_canceled": true
				},
				"status": {
					"finishedBytes": 4,
					"totalBytes": 100,
					"progress": "4%",
					"metaBinlog": "mysql-bin.000002, 8",
					"metaBinlogGTID": "1-2-3",
					"bps": 1000
				},
				"io_total_bytes": 0,
				"dump_io_total_bytes": 0
			},
			"duration": 1000000000
		},
		"task6": {
			"expected_stage": "Running",
			"worker_id": "worker6",
			"config_outdated": true,
			"status": {
				"error_message": "",
				"unit": "DMSyncTask",
				"stage": "Error",
				"result": {
					"errors": [
						{
							"error_code": 1,
							"error_class": "class",
							"error_scope": "scope",
							"error_level": "low",
							"message": "msg",
							"raw_cause": "raw cause",
							"workaround": "workaround"
						}
					]
				},
				"status": {
					"masterBinlog": "mysql-bin.000002, 4",
					"masterBinlogGtid": "1-2-20",
					"syncerBinlog": "mysql-bin.000001, 432",
					"syncerBinlogGtid": "1-2-10",
					"blockingDDLs": [
						"ALTER TABLE db.tb ADD COLUMN a INT"
					],
					"binlogType": "remote",
					"secondsBehindMaster": 10,
					"totalRows": 10,
					"totalRps": 10,
					"recentRps": 10
				},
				"io_total_bytes": 0,
				"dump_io_total_bytes": 0
			},
			"duration": 1000000000
		},
		"task7": {
			"expected_stage": "Finished",
			"worker_id": "",
			"config_outdated": true,
			"status": {
				"error_message": "worker for task task7 not found",
				"unit": "",
				"stage": "",
				"result": null,
				"status": null,
				"io_total_bytes": 0,
				"dump_io_total_bytes": 0
			},
			"duration": 1000000000
		}
	},
	"finished_unit_status": {
		"task2": [
			{
				"Unit": "DMDumpTask",
				"Task": "task2",
				"Stage": "Finished",
				"CfgModRevision": 3,
				"StageUpdatedTime": "2020-11-04T19:47:57.43382274+08:00",
				"Result": null,
				"Status": {
					"totalTables": 10,
					"completedTables": 1,
					"finishedBytes": 100,
					"finishedRows": 10,
					"estimateTotalRows": 1000,
					"bps": 1000,
					"progress": "20.00 %"
				},
				"Duration": 3600000000000
			},
			{
				"Unit": "DMLoadTask",
				"Task": "task2",
				"Stage": "Finished",
				"CfgModRevision": 3,
				"StageUpdatedTime": "2020-11-04T20:47:57.43382274+08:00",
				"Result": null,
				"Status": {
					"finishedBytes": 4,
					"totalBytes": 100,
					"progress": "4%",
					"metaBinlog": "mysql-bin.000002, 8",
					"metaBinlogGTID": "1-2-3",
					"bps": 1000
				},
				"Duration": 60000000000
			}
		],
		"task7": [
			{
				"Unit": "DMDumpTask",
				"Task": "task7",
				"Stage": "Finished",
				"CfgModRevision": 4,
				"StageUpdatedTime": "2020-11-04T19:47:57.43382274+08:00",
				"Result": null,
				"Status": {
					"totalTables": 10,
					"completedTables": 1,
					"finishedBytes": 100,
					"finishedRows": 10,
					"estimateTotalRows": 1000,
					"bps": 1000,
					"progress": "20.00 %"
				},
				"Duration": 3600000000000
			},
			{
				"Unit": "DMLoadTask",
				"Task": "task7",
				"Stage": "Finished",
				"CfgModRevision": 4,
				"StageUpdatedTime": "2020-11-04T20:47:57.43382274+08:00",
				"Result": null,
				"Status": {
					"finishedBytes": 4,
					"totalBytes": 100,
					"progress": "4%",
					"metaBinlog": "mysql-bin.000002, 8",
					"metaBinlogGTID": "1-2-3",
					"bps": 1000
				},
				"Duration": 60000000000
			}
		]
	}
}`
	status, err := json.MarshalIndent(jobStatus, "", "\t")
	require.NoError(t, err)
	require.Equal(t, expectedStatus, string(status))
}

// TestOperateTask checks operateTask error paths: an unsupported op type
// (Delete) and a supported op (Pause) issued before any job state exists.
func TestOperateTask(t *testing.T) {
	jm := &JobMaster{
		taskManager: NewTaskManager("test-job", nil, metadata.NewJobStore(kvmock.NewMetaMock(), log.L()), nil, log.L(), promutil.NewFactory4Test(t.TempDir())),
	}
	require.EqualError(t, jm.operateTask(context.Background(), dmpkg.Delete, nil, nil), fmt.Sprintf("unsupported op type %d for operate task", dmpkg.Delete))
	require.EqualError(t, jm.operateTask(context.Background(), dmpkg.Pause, nil, nil), "state not found")
}

// TestGetJobCfg verifies GetJobCfg fails with "state not found" before a job
// is stored, and returns the stored config (here: full mode) afterwards.
func TestGetJobCfg(t *testing.T) {
	kvClient := kvmock.NewMetaMock()
	jm := &JobMaster{
		metadata: metadata.NewMetaData(kvClient, log.L()),
	}
	jobCfg, err := jm.GetJobCfg(context.Background())
	require.EqualError(t, err, "state not found")
	require.Nil(t, jobCfg)

	jobCfg = &config.JobCfg{TaskMode: dmconfig.ModeFull, Upstreams: []*config.UpstreamCfg{{}}}
	job := metadata.NewJob(jobCfg)
	jm.metadata.JobStore().Put(context.Background(), job)

	jobCfg, err = jm.GetJobCfg(context.Background())
	require.NoError(t, err)
	require.Equal(t, dmconfig.ModeFull, jobCfg.TaskMode)
}

// TestUpdateJobCfg covers UpdateJobCfg failure paths: a version-query
// database error during precheck, then "state not found" when no job exists
// yet. Source-config adjustment and sync-config checking are stubbed out so
// no real upstream is needed.
func TestUpdateJobCfg(t *testing.T) {
	var (
		mockBaseJobmaster   = &MockBaseJobmaster{t: t}
		metaKVClient        = kvmock.NewMetaMock()
		mockCheckpointAgent = &MockCheckpointAgent{}
		messageAgent        = &dmpkg.MockMessageAgent{}
		jobCfg              = &config.JobCfg{}
		jm                  = &JobMaster{
			BaseJobMaster:   mockBaseJobmaster,
			metadata:        metadata.NewMetaData(metaKVClient, log.L()),
			checkpointAgent: mockCheckpointAgent,
		}
	)
	jm.taskManager = NewTaskManager("test-job", nil, jm.metadata.JobStore(), messageAgent, jm.Logger(), promutil.NewFactory4Test(t.TempDir()))
	jm.workerManager = NewWorkerManager(mockBaseJobmaster.ID(), nil, jm.metadata.JobStore(), jm.metadata.UnitStateStore(), jm, messageAgent, mockCheckpointAgent, jm.Logger(),
		resModel.ResourceTypeLocalFile)
	// Stub out source-config adjustment (package-level hook); restore on exit.
	funcBackup := master.CheckAndAdjustSourceConfigFunc
	master.CheckAndAdjustSourceConfigFunc = func(ctx context.Context, cfg *dmconfig.SourceConfig) error { return nil }
	defer func() {
		master.CheckAndAdjustSourceConfigFunc = funcBackup
	}()

	precheckError := errors.New("precheck error")
	checker.CheckSyncConfigFunc = func(_ context.Context, _ []*dmconfig.SubTaskConfig, _, _ int64) (string, error) {
		return "", precheckError
	}
	require.NoError(t, jobCfg.DecodeFile(jobTemplatePath))
	// Version query fails -> UpdateJobCfg surfaces the database error.
	verDB := conn.InitVersionDB()
	verDB.ExpectQuery("SHOW GLOBAL VARIABLES LIKE 'version'").WillReturnError(errors.New("database error"))
	require.EqualError(t, jm.UpdateJobCfg(context.Background(), jobCfg), "database error")

	// Precheck passes but no job state is stored yet.
	verDB = conn.InitVersionDB()
	verDB.ExpectQuery("SHOW GLOBAL VARIABLES LIKE 'version'").WillReturnRows(sqlmock.NewRows([]string{"Variable_name", "Value"}).
		AddRow("version", "5.7.25-TiDB-v6.1.0"))
	checker.CheckSyncConfigFunc = func(_ context.Context, _ []*dmconfig.SubTaskConfig, _, _ int64) (string, error) {
		return "check pass", nil
	}
	require.EqualError(t, jm.UpdateJobCfg(context.Background(), jobCfg), "state not found")

	err := jm.taskManager.OperateTask(context.Background(), dmpkg.Create, jobCfg, nil)
	require.NoError(t, err)
	// NOTE(review): the expectations below are never consumed — no further
	// UpdateJobCfg call follows. Looks like an unfinished success-path check;
	// confirm against upstream history before removing.
	verDB = conn.InitVersionDB()
	verDB.ExpectQuery("SHOW GLOBAL VARIABLES LIKE 'version'").WillReturnRows(sqlmock.NewRows([]string{"Variable_name", "Value"}).
		AddRow("version", "5.7.25-TiDB-v6.1.0"))
	checker.CheckSyncConfigFunc = func(_ context.Context, _ []*dmconfig.SubTaskConfig, _, _ int64) (string, error) {
		return "check pass", nil
	}
}

// TestBinlog checks JobMaster.Binlog: "state not found" without a job, then
// fan-out to two sources where one request succeeds ("msg") and the other
// fails ("error"); per-source results carry the outcome, the top-level
// ErrorMsg stays empty. Note: the two mock returns are consumed in whichever
// order the sources are dispatched, hence the concatenated assertions.
func TestBinlog(t *testing.T) {
	kvClient := kvmock.NewMetaMock()
	messageAgent := &dmpkg.MockMessageAgent{}
	jm := &JobMaster{
		metadata:     metadata.NewMetaData(kvClient, log.L()),
		messageAgent: messageAgent,
	}
	resp, err := jm.Binlog(context.Background(), &dmpkg.BinlogRequest{})
	require.EqualError(t, err, "state not found")
	require.Nil(t, resp)

	messageAgent.On("SendRequest", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(&dmpkg.CommonTaskResponse{Msg: "msg"}, nil).Once()
	messageAgent.On("SendRequest", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil, errors.New("error")).Once()
	job := metadata.NewJob(&config.JobCfg{Upstreams: []*config.UpstreamCfg{
		{MySQLInstance: dmconfig.MySQLInstance{SourceID: "task1"}},
		{MySQLInstance: dmconfig.MySQLInstance{SourceID: "task2"}},
	}})
	jm.metadata.JobStore().Put(context.Background(), job)
	resp, err = jm.Binlog(context.Background(), &dmpkg.BinlogRequest{})
	require.Nil(t, err)
	require.Equal(t, "", resp.ErrorMsg)
	errMsg := resp.Results["task1"].ErrorMsg + resp.Results["task2"].ErrorMsg
	msg := resp.Results["task1"].Msg + resp.Results["task2"].Msg
	require.Equal(t, "error", errMsg)
	require.Equal(t, "msg", msg)
}

// TestBinlogSchema checks JobMaster.BinlogSchema: it rejects an empty source
// list, then fans out to two explicit sources with one success and one
// failure (same order-agnostic assertion trick as TestBinlog).
func TestBinlogSchema(t *testing.T) {
	messageAgent := &dmpkg.MockMessageAgent{}
	jm := &JobMaster{
		messageAgent: messageAgent,
	}
	resp := jm.BinlogSchema(context.Background(), &dmpkg.BinlogSchemaRequest{})
	require.Equal(t, "must specify at least one source", resp.ErrorMsg)

	messageAgent.On("SendRequest", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(&dmpkg.CommonTaskResponse{Msg: "msg"}, nil).Once()
	messageAgent.On("SendRequest", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil, errors.New("error")).Once()
	resp = jm.BinlogSchema(context.Background(), &dmpkg.BinlogSchemaRequest{Sources: []string{"task1", "task2"}})
	require.Equal(t, "", resp.ErrorMsg)
	errMsg := resp.Results["task1"].ErrorMsg + resp.Results["task2"].ErrorMsg
	msg := resp.Results["task1"].Msg + resp.Results["task2"].Msg
	require.Equal(t, "error", errMsg)
	require.Equal(t, "msg", msg)
}

// sortString returns w with its characters sorted lexicographically, giving
// an order-independent canonical form for comparing strings in tests.
func sortString(w string) string {
	s := strings.Split(w, "")
	sort.Strings(s)
	return strings.Join(s, "")
}