storj.io/minio@v0.0.0-20230509071714-0cbc90f649b1/mint/run/core/aws-sdk-ruby/aws-stub-tests.rb

#!/usr/bin/env ruby
#
# Mint (C) 2017 Minio, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

require 'aws-sdk'
require 'securerandom'
require 'net/http'
require 'multipart_body'
require 'json'

# For aws-sdk ruby tests to run, setting the following
# environment variables is mandatory.
# SERVER_ENDPOINT: <ip:port> address of the minio server tests will run against
# ACCESS_KEY: access key for the minio server
# SECRET_KEY: secret key for the minio server
# SERVER_REGION: region the minio server is set up to run in
# ENABLE_HTTPS: (1|0) turn on/off to specify whether the minio
#               server is running on https or http
# MINT_DATA_DIR: data directory where test data files are stored

class AwsSdkRubyTest
  # Set variables necessary to create an s3 client instance.
  # Get them from the environment variables

  # Region information, eg. "us-east-1"
  region = ENV['SERVER_REGION'] ||= 'SERVER_REGION is not set'
  access_key_id = ENV['ACCESS_KEY'] ||= 'ACCESS_KEY is not set'
  secret_access_key = ENV['SECRET_KEY'] ||= 'SECRET_KEY is not set'
  enable_https = ENV['ENABLE_HTTPS']
  # Minio server, eg. "play.minio.io:9000"
  end_point = ENV['SERVER_ENDPOINT'] ||= 'SERVER_ENDPOINT is not set'
  endpoint = enable_https == '1' ? 'https://' + end_point : 'http://' + end_point

  # Create s3 resource instance, "s3"
  @@s3 = Aws::S3::Resource.new(
    region: region,
    endpoint: endpoint,
    access_key_id: access_key_id,
    secret_access_key: secret_access_key,
    force_path_style: true)

  def initialize_log_output(meth, alert = nil)
    # Initialize and return log content in log_output hash table

    # Collect args in args_arr
    args_arr = method(meth).parameters.flatten.map(&:to_s)
                           .reject { |x| x == 'req' || x == 'opt' }
    # Create and return log output content
    { name: 'aws-sdk-ruby',
      function: "#{meth}(#{args_arr.join(',')})", # method name and arguments
      args: args_arr, # array of arg names. This'll be replaced with
                      # arg/value pairs inside the caller method
      duration: 0, # test runtime duration in milliseconds
      alert: alert,
      message: nil,
      error: nil }
  end

  def random_bucket_name
    'aws-sdk-ruby-bucket-' + SecureRandom.hex(6)
  end

  def calculate_duration(t2, t1)
    # Durations are in milliseconds, with precision of 2 decimal places
    ((t2 - t1) * 1000).round(2)
  end

  def print_log(log_output, start_time)
    # Calculate duration in milliseconds
    log_output[:duration] = calculate_duration(Time.now, start_time)
    # Get rid of the log_output fields that are nil
    puts log_output.delete_if { |_k, value| value.nil? }.to_json
    # Exit at the first failure
    exit 1 if log_output[:status] == 'FAIL'
  end

  def cleanUp(buckets, log_output)
    # Removes objects and bucket if bucket exists
    bucket_name = ''
    buckets.each do |b|
      bucket_name = b
      if bucketExistsWrapper(b, log_output)
        removeObjectsWrapper(b, log_output)
        removeBucketWrapper(b, log_output)
      end
    end
  rescue => e
    raise "Failed to clean-up bucket '#{bucket_name}', #{e}"
  end

  #
  # API commands/methods
  #
  def makeBucket(bucket_name)
    # Creates a bucket, "bucket_name",
    # on S3 client, "s3".
    # Returns the bucket if it already exists
    @@s3.bucket(bucket_name).exists? ? @@s3.bucket(bucket_name) : @@s3.create_bucket(bucket: bucket_name)
  rescue => e
    raise e
  end

  def makeBucketWrapper(bucket_name, log_output)
    makeBucket(bucket_name)
  rescue => e
    log_output[:function] = 'makeBucket(bucket_name)'
    log_output[:args] = {'bucket_name': bucket_name}
    raise e
  end

  def removeBucket(bucket_name)
    # Deletes/removes bucket, "bucket_name", on S3 client, "s3"
    @@s3.bucket(bucket_name).delete
  rescue => e
    raise e
  end

  def removeBucketWrapper(bucket_name, log_output)
    removeBucket(bucket_name)
  rescue => e
    log_output[:function] = 'removeBucket(bucket_name)'
    log_output[:args] = {'bucket_name': bucket_name}
    raise e
  end

  def putObject(bucket_name, file)
    # Uploads "file" (full path) to bucket, "bucket_name",
    # on S3 client, "s3"
    file_name = File.basename(file)
    @@s3.bucket(bucket_name).object(file_name).upload_file(file)
  rescue => e
    raise e
  end

  def putObjectWrapper(bucket_name, file, log_output)
    putObject(bucket_name, file)
  rescue => e
    log_output[:function] = 'putObject(bucket_name, file)'
    log_output[:args] = {'bucket_name': bucket_name,
                         'file': file}
    raise e
  end

  def getObject(bucket_name, file, destination)
    # Gets/Downloads file, "file",
    # from bucket, "bucket_name", of S3 client, "s3"
    file_name = File.basename(file)
    dest = File.join(destination, file_name)
    @@s3.bucket(bucket_name).object(file_name).get(response_target: dest)
  rescue => e
    raise e
  end

  def getObjectWrapper(bucket_name, file, destination, log_output)
    getObject(bucket_name, file, destination)
  rescue => e
    log_output[:function] = 'getObject(bucket_name, file, destination)'
    log_output[:args] = {'bucket_name': bucket_name,
                         'file': file,
                         'destination': destination}
    raise e
  end

  def copyObject(source_bucket_name, target_bucket_name, source_file_name, target_file_name = '')
    # Copies file, "source_file_name", from source bucket,
    # "source_bucket_name", to target bucket,
    # "target_bucket_name", on S3 client, "s3"
    target_file_name = source_file_name if target_file_name.empty?
    source = @@s3.bucket(source_bucket_name)
    target = @@s3.bucket(target_bucket_name)
    source_obj = source.object(source_file_name)
    target_obj = target.object(target_file_name)
    source_obj.copy_to(target_obj)
  rescue => e
    raise e
  end

  def copyObjectWrapper(source_bucket_name, target_bucket_name, source_file_name, target_file_name = '', log_output)
    copyObject(source_bucket_name, target_bucket_name, source_file_name, target_file_name)
  rescue => e
    log_output[:function] = "copyObject(source_bucket_name, target_bucket_name, source_file_name, target_file_name = '')"
    log_output[:args] = {'source_bucket_name': source_bucket_name,
                         'target_bucket_name': target_bucket_name,
                         'source_file_name': source_file_name,
                         'target_file_name': target_file_name}
    raise e
  end

  def removeObject(bucket_name, file)
    # Deletes file, "file", in bucket,
    # "bucket_name", on S3 client, "s3".
    # If the file does not exist,
    # it quietly returns without any error message
    @@s3.bucket(bucket_name).object(file).delete
  rescue => e
    raise e
  end

  def removeObjectWrapper(bucket_name, file_name, log_output)
    removeObject(bucket_name, file_name)
  rescue => e
    log_output[:function] = 'removeObject(bucket_name, file_name)'
    log_output[:args] = {'bucket_name': bucket_name,
                         'file_name': file_name}
    raise e
  end

  def removeObjects(bucket_name)
    # Deletes all files in bucket, "bucket_name",
    # on S3 client, "s3"
    file_name = ''
    @@s3.bucket(bucket_name).objects.each do |obj|
      file_name = obj.key
      obj.delete
    end
  rescue => e
    raise "File name: '#{file_name}', #{e}"
  end

  def removeObjectsWrapper(bucket_name, log_output)
    removeObjects(bucket_name)
  rescue => e
    log_output[:function] = 'removeObjects(bucket_name)'
    log_output[:args] = {'bucket_name': bucket_name}
    raise e
  end

  def listBuckets
    # Returns an array of bucket names on S3 client, "s3"
    bucket_name_list = []
    @@s3.buckets.each do |b|
      bucket_name_list.push(b.name)
    end
    return bucket_name_list
  rescue => e
    raise e
  end

  def listBucketsWrapper(log_output)
    listBuckets
  rescue => e
    log_output[:function] = 'listBuckets'
    log_output[:args] = {}
    raise e
  end

  def listObjects(bucket_name)
    # Returns an array of object/file names
    # in bucket, "bucket_name", on S3 client, "s3"
    object_list = []
    @@s3.bucket(bucket_name).objects.each do |obj|
      object_list.push(obj.key)
    end
    return object_list
  rescue => e
    raise e
  end

  def listObjectsWrapper(bucket_name, log_output)
    listObjects(bucket_name)
  rescue => e
    log_output[:function] = 'listObjects(bucket_name)'
    log_output[:args] = {'bucket_name': bucket_name}
    raise e
  end

  def statObject(bucket_name, file_name)
    return @@s3.bucket(bucket_name).object(file_name).exists?
  rescue => e
    raise e
  end

  def statObjectWrapper(bucket_name, file_name, log_output)
    statObject(bucket_name, file_name)
  rescue => e
    log_output[:function] = 'statObject(bucket_name, file_name)'
    log_output[:args] = {'bucket_name': bucket_name,
                         'file_name': file_name}
    raise e
  end

  def bucketExists?(bucket_name)
    # Returns true if bucket, "bucket_name", exists,
    # false otherwise
    return @@s3.bucket(bucket_name).exists?
  rescue => e
    raise e
  end

  def bucketExistsWrapper(bucket_name, log_output)
    bucketExists?(bucket_name)
  rescue => e
    log_output[:function] = 'bucketExists?(bucket_name)'
    log_output[:args] = {'bucket_name': bucket_name}
    raise e
  end

  def presignedGet(bucket_name, file_name)
    # Returns download/get url
    obj = @@s3.bucket(bucket_name).object(file_name)
    return obj.presigned_url(:get, expires_in: 600)
  rescue => e
    raise e
  end

  def presignedGetWrapper(bucket_name, file_name, log_output)
    presignedGet(bucket_name, file_name)
  rescue => e
    log_output[:function] = 'presignedGet(bucket_name, file_name)'
    log_output[:args] = {'bucket_name': bucket_name,
                         'file_name': file_name}
    raise e
  end

  def presignedPut(bucket_name, file_name)
    # Returns put url
    obj = @@s3.bucket(bucket_name).object(file_name)
    return obj.presigned_url(:put, expires_in: 600)
  rescue => e
    raise e
  end

  def presignedPutWrapper(bucket_name, file_name, log_output)
    presignedPut(bucket_name, file_name)
  rescue => e
    log_output[:function] = 'presignedPut(bucket_name, file_name)'
    log_output[:args] = {'bucket_name': bucket_name,
                         'file_name': file_name}
    raise e
  end

  def presignedPost(bucket_name, file_name, expires_in_sec, max_byte_size)
    # Returns upload/post url
    obj = @@s3.bucket(bucket_name).object(file_name)
    return obj.presigned_post(expires: Time.now + expires_in_sec,
                              content_length_range: 1..max_byte_size)
  rescue => e
    raise e
  end

  def presignedPostWrapper(bucket_name, file_name, expires_in_sec, max_byte_size, log_output)
    presignedPost(bucket_name, file_name, expires_in_sec, max_byte_size)
  rescue => e
    log_output[:function] = 'presignedPost(bucket_name, file_name, expires_in_sec, max_byte_size)'
    log_output[:args] = {'bucket_name': bucket_name,
                         'file_name': file_name,
                         'expires_in_sec': expires_in_sec,
                         'max_byte_size': max_byte_size}
    raise e
  end

  # To be addressed. S3 API 'get_bucket_policy' does not work!
  # def getBucketPolicy(bucket_name)
  #   # Returns bucket policy
  #   return @@s3.bucket(bucket_name).get_bucket_policy
  # rescue => e
  #   raise e
  # end

  #
  # Test case methods
  #
  def listBucketsTest
    # Tests listBuckets api command by creating
    # new buckets from bucket_name_list

    # get 2 different random bucket names and create a list
    bucket_name_list = [random_bucket_name, random_bucket_name]
    # Initialize hash table, 'log_output'
    log_output = initialize_log_output('listBuckets')
    # Prepare arg/value hash table and set it in log_output
    arg_value_hash = {}
    log_output[:args].each { |x| arg_value_hash[:"#{x}"] = eval x.to_s }
    log_output[:args] = arg_value_hash

    begin
      start_time = Time.now
      prev_total_buckets = listBucketsWrapper(log_output).length
      new_buckets = bucket_name_list.length
      bucket_name_list.each do |b|
        makeBucketWrapper(b, log_output)
      end
      # Re-list the buckets after creation and compare against the expected total
      new_total_buckets = listBucketsWrapper(log_output).length
      if new_total_buckets >= prev_total_buckets + new_buckets
        log_output[:status] = 'PASS'
      else
        log_output[:error] = 'Could not find expected number of buckets'
        log_output[:status] = 'FAIL'
      end
      cleanUp(bucket_name_list, log_output)
    rescue => log_output[:error]
      log_output[:status] = 'FAIL'
    end

    print_log(log_output, start_time)
  end

  def makeBucketTest
    # Tests makeBucket api command.

    # get random bucket name
    bucket_name = random_bucket_name
    # Initialize hash table, 'log_output'
    log_output = initialize_log_output('makeBucket')
    # Prepare arg/value hash table and set it in log_output
    arg_value_hash = {}
    log_output[:args].each { |x| arg_value_hash[:"#{x}"] = eval x.to_s }
    log_output[:args] = arg_value_hash

    begin
      start_time = Time.now
      makeBucketWrapper(bucket_name, log_output)

      if bucketExistsWrapper(bucket_name, log_output)
        log_output[:status] = 'PASS'
      else
        log_output[:error] = 'Bucket expected to be created does not exist'
        log_output[:status] = 'FAIL'
      end
      cleanUp([bucket_name], log_output)
    rescue => log_output[:error]
      log_output[:status] = 'FAIL'
    end

    print_log(log_output, start_time)
  end

  def bucketExistsNegativeTest
    # Tests bucketExists? api command.

    # get random bucket name
    bucket_name = random_bucket_name
    # Initialize hash table, 'log_output'
    log_output = initialize_log_output('bucketExists?')
    # Prepare arg/value hash table and set it in log_output
    arg_value_hash = {}
    log_output[:args].each { |x| arg_value_hash[:"#{x}"] = eval x.to_s }
    log_output[:args] = arg_value_hash

    begin
      start_time = Time.now
      if !bucketExistsWrapper(bucket_name, log_output)
        log_output[:status] = 'PASS'
      else
        log_output[:error] = "Failed to return 'false' for a non-existing bucket"
        log_output[:status] = 'FAIL'
      end
      cleanUp([bucket_name], log_output)
    rescue => log_output[:error]
      log_output[:status] = 'FAIL'
    end

    print_log(log_output, start_time)
  end

  def removeBucketTest
    # Tests removeBucket api command.
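    # Flow: create a bucket, remove it, then verify it no longer exists.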

    # get a random bucket name
    bucket_name = random_bucket_name
    # Initialize hash table, 'log_output'
    log_output = initialize_log_output('removeBucket')
    # Prepare arg/value hash table and set it in log_output
    arg_value_hash = {}
    log_output[:args].each { |x| arg_value_hash[:"#{x}"] = eval x.to_s }
    log_output[:args] = arg_value_hash

    begin
      start_time = Time.now
      makeBucketWrapper(bucket_name, log_output)
      removeBucketWrapper(bucket_name, log_output)
      if !bucketExistsWrapper(bucket_name, log_output)
        log_output[:status] = 'PASS'
      else
        log_output[:error] = 'Bucket expected to be removed still exists'
        log_output[:status] = 'FAIL'
      end
      cleanUp([bucket_name], log_output)
    rescue => log_output[:error]
      log_output[:status] = 'FAIL'
    end

    print_log(log_output, start_time)
  end

  def putObjectTest(file)
    # Tests putObject api command by uploading a file

    # get random bucket name
    bucket_name = random_bucket_name
    # Initialize hash table, 'log_output'
    log_output = initialize_log_output('putObject')
    # Prepare arg/value hash table and set it in log_output
    arg_value_hash = {}
    log_output[:args].each { |x| arg_value_hash[:"#{x}"] = eval x.to_s }
    log_output[:args] = arg_value_hash

    begin
      start_time = Time.now
      makeBucketWrapper(bucket_name, log_output)
      putObjectWrapper(bucket_name, file, log_output)
      if statObjectWrapper(bucket_name, File.basename(file), log_output)
        log_output[:status] = 'PASS'
      else
        log_output[:error] = "Status for the created object returned 'false'"
        log_output[:status] = 'FAIL'
      end
      cleanUp([bucket_name], log_output)
    rescue => log_output[:error]
      log_output[:status] = 'FAIL'
    end

    print_log(log_output, start_time)
  end

  def removeObjectTest(file)
    # Tests removeObject api command by uploading and removing a file

    # get random bucket name
    bucket_name = random_bucket_name
    # Initialize hash table, 'log_output'
    log_output = initialize_log_output('removeObject')
    # Prepare arg/value hash table and set it in log_output
    arg_value_hash = {}
    log_output[:args].each { |x| arg_value_hash[:"#{x}"] = eval x.to_s }
    log_output[:args] = arg_value_hash

    begin
      start_time = Time.now
      makeBucketWrapper(bucket_name, log_output)
      putObjectWrapper(bucket_name, file, log_output)
      removeObjectWrapper(bucket_name, File.basename(file), log_output)
      if !statObjectWrapper(bucket_name, File.basename(file), log_output)
        log_output[:status] = 'PASS'
      else
        log_output[:error] = "Status for the removed object returned 'true'"
        log_output[:status] = 'FAIL'
      end
      cleanUp([bucket_name], log_output)
    rescue => log_output[:error]
      log_output[:status] = 'FAIL'
    end

    print_log(log_output, start_time)
  end

  def getObjectTest(file, destination)
    # Tests getObject api command

    # get random bucket name
    bucket_name = random_bucket_name
    # Initialize hash table, 'log_output'
    log_output = initialize_log_output('getObject')
    # Prepare arg/value hash table and set it in log_output
    arg_value_hash = {}
    log_output[:args].each { |x| arg_value_hash[:"#{x}"] = eval x.to_s }
    log_output[:args] = arg_value_hash

    begin
      start_time = Time.now
      makeBucketWrapper(bucket_name, log_output)
      putObjectWrapper(bucket_name, file, log_output)
      getObjectWrapper(bucket_name, file, destination, log_output)
      # Check that the downloaded file exists at the destination
      if system("ls -l #{File.join(destination, File.basename(file))} > /dev/null")
        log_output[:status] = 'PASS'
      else
        log_output[:error] = "Downloaded object does not exist at #{destination}"
        log_output[:status] = 'FAIL'
      end
      cleanUp([bucket_name], log_output)
    rescue => log_output[:error]
      log_output[:status] = 'FAIL'
    end

    print_log(log_output, start_time)
  end

  def listObjectsTest(file_list)
    # Tests listObjects api command

    # get random bucket name
    bucket_name = random_bucket_name
    # Initialize hash table, 'log_output'
    log_output = initialize_log_output('listObjects')
    # Prepare arg/value hash table and set it in log_output
    arg_value_hash = {}
    log_output[:args].each { |x| arg_value_hash[:"#{x}"] = eval x.to_s }
    log_output[:args] = arg_value_hash

    begin
      start_time = Time.now
      makeBucketWrapper(bucket_name, log_output)
      # Put all objects into the bucket
      file_list.each do |f|
        putObjectWrapper(bucket_name, f, log_output)
      end
      # Total number of files uploaded
      expected_no = file_list.length
      # Actual number is what api returns
      actual_no = listObjectsWrapper(bucket_name, log_output).length
      # Compare expected and actual values
      if expected_no == actual_no
        log_output[:status] = 'PASS'
      else
        log_output[:error] = 'Expected and actual number of listed files/objects do not match!'
        log_output[:status] = 'FAIL'
      end
      cleanUp([bucket_name], log_output)
    rescue => log_output[:error]
      log_output[:status] = 'FAIL'
    end

    print_log(log_output, start_time)
  end

  def copyObjectTest(data_dir, source_file_name, target_file_name = '')
    # Tests copyObject api command

    # get random bucket names
    source_bucket_name = random_bucket_name
    target_bucket_name = random_bucket_name
    # Initialize hash table, 'log_output'
    log_output = initialize_log_output('copyObject')
    # Prepare arg/value hash table and set it in log_output
    arg_value_hash = {}
    log_output[:args].each { |x| arg_value_hash[:"#{x}"] = eval x.to_s }
    log_output[:args] = arg_value_hash

    begin
      start_time = Time.now
      target_file_name = source_file_name if target_file_name.empty?
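      # Create the source and target buckets, upload the source file,
      # and then copy it from the source bucket to the target bucket.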
      makeBucketWrapper(source_bucket_name, log_output)
      makeBucketWrapper(target_bucket_name, log_output)
      putObjectWrapper(source_bucket_name,
                       File.join(data_dir, source_file_name), log_output)
      copyObjectWrapper(source_bucket_name, target_bucket_name,
                        source_file_name, target_file_name, log_output)
      # Check if copy worked fine
      if statObjectWrapper(target_bucket_name, target_file_name, log_output)
        log_output[:status] = 'PASS'
      else
        log_output[:error] = 'Copied file could not be found in the expected location'
        log_output[:status] = 'FAIL'
      end
      cleanUp([source_bucket_name, target_bucket_name], log_output)
    rescue => log_output[:error]
      log_output[:status] = 'FAIL'
    end

    print_log(log_output, start_time)
  end

  def presignedGetObjectTest(data_dir, file_name)
    # Tests presignedGetObject api command

    # get random bucket name
    bucket_name = random_bucket_name
    # Initialize hash table, 'log_output'
    log_output = initialize_log_output('presignedGet')
    # Prepare arg/value hash table and set it in log_output
    arg_value_hash = {}
    log_output[:args].each { |x| arg_value_hash[:"#{x}"] = eval x.to_s }
    log_output[:args] = arg_value_hash

    begin
      start_time = Time.now
      makeBucketWrapper(bucket_name, log_output)
      file = File.join(data_dir, file_name)
      # Get check sum value without the file name
      cksum_orig = `cksum #{file}`.split[0..1]
      putObjectWrapper(bucket_name, file, log_output)
      get_url = presignedGetWrapper(bucket_name, file_name, log_output)
      # Download the file using the URL
      # generated by presignedGet api command
      `wget -O /tmp/#{file_name} '#{get_url}' > /dev/null 2>&1`
      # Get check sum value for the downloaded file
      # Split to get rid of the file name
      cksum_new = `cksum /tmp/#{file_name}`.split[0..1]

      # Check if check sum values for the orig file
      # and the downloaded file match
      if cksum_orig == cksum_new
        log_output[:status] = 'PASS'
      else
        log_output[:error] = 'Check sum values do NOT match'
        log_output[:status] = 'FAIL'
      end
      cleanUp([bucket_name], log_output)
    rescue => log_output[:error]
      log_output[:status] = 'FAIL'
    end

    print_log(log_output, start_time)
  end

  def presignedPutObjectTest(data_dir, file_name)
    # Tests presignedPutObject api command

    # get random bucket name
    bucket_name = random_bucket_name
    # Initialize hash table, 'log_output'
    log_output = initialize_log_output('presignedPut')
    # Prepare arg/value hash table and set it in log_output
    arg_value_hash = {}
    log_output[:args].each { |x| arg_value_hash[:"#{x}"] = eval x.to_s }
    log_output[:args] = arg_value_hash

    begin
      start_time = Time.now
      makeBucketWrapper(bucket_name, log_output)
      file = File.join(data_dir, file_name)

      # Get check sum value and
      # split to get rid of the file name
      cksum_orig = `cksum #{file}`.split[0..1]

      # Generate presigned Put URL and parse it
      uri = URI.parse(presignedPutWrapper(bucket_name, file_name, log_output))
      request = Net::HTTP::Put.new(uri.request_uri, 'x-amz-acl' => 'public-read')
      request.body = IO.read(File.join(data_dir, file_name))

      http = Net::HTTP.new(uri.host, uri.port)
      http.use_ssl = true if ENV['ENABLE_HTTPS'] == '1'

      http.request(request)

      if statObjectWrapper(bucket_name, file_name, log_output)
        getObjectWrapper(bucket_name, file_name, '/tmp', log_output)
        cksum_new = `cksum /tmp/#{file_name}`.split[0..1]
        # Check if check sum values of the orig file
        # and the downloaded file match
        if cksum_orig == cksum_new
          log_output[:status] = 'PASS'
        else
          log_output[:error] = 'Check sum values do NOT match'
          log_output[:status] = 'FAIL'
        end
      else
        log_output[:error] = 'Expected to be created object does NOT exist'
        log_output[:status] = 'FAIL'
      end
      cleanUp([bucket_name], log_output)
    rescue => log_output[:error]
      log_output[:status] = 'FAIL'
    end

    print_log(log_output, start_time)
  end

  def presignedPostObjectTest(data_dir, file_name,
                              expires_in_sec, max_byte_size)
    # Tests presignedPostObject api command

    # get random bucket name
    bucket_name = random_bucket_name
    # Initialize hash table, 'log_output'
    log_output = initialize_log_output('presignedPost')
    # Prepare arg/value hash table and set it in log_output
    arg_value_hash = {}
    log_output[:args].each { |x| arg_value_hash[:"#{x}"] = eval x.to_s }
    log_output[:args] = arg_value_hash

    begin
      start_time = Time.now
      makeBucketWrapper(bucket_name, log_output)
      file = File.join(data_dir, file_name)

      # Get check sum value and split it
      # into parts to get rid of the file name
      cksum_orig = `cksum #{file}`.split[0..1]
      # Create the presigned POST url
      post = presignedPostWrapper(bucket_name, file_name,
                                  expires_in_sec, max_byte_size, log_output)

      # Prepare multi parts array for POST command request
      file_part = Part.new name: 'file',
                           body: IO.read(File.join(data_dir, file_name)),
                           filename: file_name,
                           content_type: 'application/octet-stream'
      parts = [file_part]
      # Add POST fields into parts array
      post.fields.each do |field, value|
        parts.push(Part.new(field, value))
      end
      boundary = "---------------------------#{rand(10_000_000_000_000_000)}"
      body_parts = MultipartBody.new parts, boundary

      # Parse presigned Post URL
      uri = URI.parse(post.url)

      # Create the HTTP objects
      http = Net::HTTP.new(uri.host, uri.port)
      http.use_ssl = true if ENV['ENABLE_HTTPS'] == '1'
      request = Net::HTTP::Post.new(uri.request_uri)
      request.body = body_parts.to_s
      request.content_type = "multipart/form-data; boundary=#{boundary}"
      # Send the request
      log_output[:error] = http.request(request)

      if statObjectWrapper(bucket_name, file_name, log_output)
        getObjectWrapper(bucket_name, file_name, '/tmp', log_output)
        cksum_new = `cksum /tmp/#{file_name}`.split[0..1]
        # Check if check sum values of the orig file
        # and the downloaded file match
        if cksum_orig == cksum_new
          log_output[:status] = 'PASS'
          # FIXME: HTTP No Content error, status code=204 is returned as error
          log_output[:error] = nil
        else
          log_output[:error] = 'Check sum values do NOT match'
          log_output[:status] = 'FAIL'
        end
      else
        log_output[:error] = 'Expected to be created object does NOT exist'
        log_output[:status] = 'FAIL'
      end
      cleanUp([bucket_name], log_output)
    rescue => log_output[:error]
      log_output[:status] = 'FAIL'
    end

    print_log(log_output, start_time)
  end
end

# MAIN CODE

# Create test Class instance and call the tests
aws = AwsSdkRubyTest.new
file_name1 = 'datafile-1-kB'
file_new_name = 'datafile-1-kB-copy'
file_name_list = ['datafile-1-kB', 'datafile-1-b', 'datafile-6-MB']
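# NOTE: the data files named above are expected to already exist under
# MINT_DATA_DIR (see the environment variable notes at the top of this file).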
# The location where the bucket and file
# objects are going to be created.
data_dir = ENV['MINT_DATA_DIR'] ||= 'MINT_DATA_DIR is not set'
# Add data_dir in front of each file name in file_name_list
file_list = file_name_list.map { |f| File.join(data_dir, f) }
destination = '/tmp'

aws.listBucketsTest
aws.listObjectsTest(file_list)
aws.makeBucketTest
aws.bucketExistsNegativeTest
aws.removeBucketTest
aws.putObjectTest(File.join(data_dir, file_name1))
aws.removeObjectTest(File.join(data_dir, file_name1))
aws.getObjectTest(File.join(data_dir, file_name1), destination)
aws.copyObjectTest(data_dir, file_name1)
aws.copyObjectTest(data_dir, file_name1, file_new_name)
aws.presignedGetObjectTest(data_dir, file_name1)
aws.presignedPutObjectTest(data_dir, file_name1)
aws.presignedPostObjectTest(data_dir, file_name1, 60, 3 * 1024 * 1024)
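
# For reference, this script is normally run by the Mint framework with the
# environment variables described at the top of this file already exported,
# along the lines of (placeholder values, not actual credentials):
#   SERVER_ENDPOINT=play.minio.io:9000 ACCESS_KEY=<access-key> SECRET_KEY=<secret-key> \
#   SERVER_REGION=us-east-1 ENABLE_HTTPS=0 MINT_DATA_DIR=<data-dir> \
#   ruby aws-stub-tests.rb
# Each test prints a single JSON log line, and the script exits with status 1
# on the first failing test (see print_log).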