github.com/NVIDIA/aistore@v1.3.23-0.20240517131212-7df6609be51d/python/tests/s3compat/tests.py

     1  #!/usr/bin/env python
     2  # -*- coding: utf-8 -*-
     3  # MinIO Python Library for Amazon S3 Compatible Cloud Storage,
     4  # (C) 2015, 2016, 2017, 2018 MinIO, Inc.
     5  #
     6  # Licensed under the Apache License, Version 2.0 (the "License");
     7  # you may not use this file except in compliance with the License.
     8  # You may obtain a copy of the License at
     9  #
    10  #     http://www.apache.org/licenses/LICENSE-2.0
    11  #
    12  # Unless required by applicable law or agreed to in writing, software
    13  # distributed under the License is distributed on an "AS IS" BASIS,
    14  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    15  # See the License for the specific language governing permissions and
    16  # limitations under the License.
    17  
    18  # pylint: disable=too-many-lines,unused-variable,implicit-str-concat
    19  """Functional tests of minio-py."""
    20  
    21  from __future__ import absolute_import, division
    22  
    23  import hashlib
    24  import io
    25  import json
    26  import math
    27  import os
    28  import random
    29  import shutil
    30  import sys
    31  import tempfile
    32  import time
    33  import traceback
    34  from binascii import crc32
    35  from datetime import datetime, timedelta, timezone
    36  from inspect import getfullargspec
    37  from threading import Thread
    38  from uuid import uuid4
    39  
    40  import certifi
    41  import urllib3
    42  
    43  from minio import Minio
    44  from minio.commonconfig import ENABLED, REPLACE, CopySource
    45  from minio.datatypes import PostPolicy
    46  from minio.deleteobjects import DeleteObject
    47  from minio.error import S3Error
    48  from minio.select import CSVInputSerialization, CSVOutputSerialization, SelectRequest
    49  from minio.sse import SseCustomerKey
    50  from minio.time import to_http_header
    51  from minio.versioningconfig import VersioningConfig
    52  
    53  _CLIENT = None  # initialized in main().
    54  _TEST_FILE = None  # initialized in main().
    55  _LARGE_FILE = None  # initialized in main().
    56  _IS_AWS = None  # initialized in main().
    57  KB = 1024
    58  MB = 1024 * KB
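        # Plain urllib3 client used by the presigned-URL tests below to issue
        # requests directly, outside the MinIO SDK.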
    59  HTTP = urllib3.PoolManager(
    60      cert_reqs="CERT_REQUIRED",
    61      ca_certs=os.environ.get("SSL_CERT_FILE") or certifi.where(),
    62  )
    63  AWS_REGION = os.environ.get("AWS_REGION", "us-east-1")
    64  
    65  
    66  def _gen_bucket_name():
    67      """Generate random bucket name."""
    68      return f"minio-py-test-{uuid4()}"
    69  
    70  
    71  def _get_sha256sum(filename):
    72      """Get SHA-256 checksum of given file."""
    73      with open(filename, "rb") as file:
    74          contents = file.read()
    75          return hashlib.sha256(contents).hexdigest()
    76  
    77  
    78  def _get_random_string(size):
    79      """Get random string of given size."""
    80      if not size:
    81          return ""
    82  
    83      chars = "abcdefghijklmnopqrstuvwxyz"
    84      chars *= int(math.ceil(size / len(chars)))
    85      chars = list(chars[:size])
    86      random.shuffle(chars)
    87      return "".join(chars)
    88  
    89  
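        # A minimal streaming reader used throughout these tests to generate
        # upload payloads, e.g.:
        #     reader = LimitedRandomReader(1 * MB)
        #     _CLIENT.put_object(bucket_name, object_name, reader, 1 * MB)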
    90  class LimitedRandomReader:  # pylint: disable=too-few-public-methods
    91      """Random data reader of specified size."""
    92  
    93      def __init__(self, limit):
    94          self._limit = limit
    95  
    96      def read(self, size=64 * KB):
    97          """Read random data of specified size."""
    98          if size < 0 or size > self._limit:
    99              size = self._limit
   100  
   101          data = _get_random_string(size)
   102          self._limit -= size
   103          return data.encode()
   104  
   105  
   106  def _call(log_entry, func, *args, **kwargs):
    107      """Record the given function on the log entry, then execute it."""
   108      log_entry["method"] = func
   109      return func(*args, **kwargs)
   110  
   111  
   112  class TestFailed(Exception):
   113      """Indicate test failed error."""
   114  
   115  
   116  def _call_test(func, *args, **kwargs):
   117      """Execute given test function."""
   118  
   119      log_entry = {
   120          "name": func.__name__,
   121          "status": "PASS",
   122      }
   123  
   124      start_time = time.time()
   125      try:
   126          func(log_entry, *args, **kwargs)
   127      except S3Error as exc:
   128          if exc.code == "NotImplemented":
   129              log_entry["alert"] = "Not Implemented"
   130              log_entry["status"] = "NA"
   131          else:
   132              log_entry["message"] = f"{exc}"
   133              log_entry["error"] = traceback.format_exc()
   134              log_entry["status"] = "FAIL"
   135      except Exception as exc:  # pylint: disable=broad-except
   136          log_entry["message"] = f"{exc}"
   137          log_entry["error"] = traceback.format_exc()
   138          log_entry["status"] = "FAIL"
   139  
   140      if log_entry.get("method"):
   141          # pylint: disable=deprecated-method
   142          args_string = ", ".join(getfullargspec(log_entry["method"]).args[1:])
   143          log_entry["function"] = f"{log_entry['method'].__name__}({args_string})"
   144      log_entry["args"] = {k: v for k, v in log_entry.get("args", {}).items() if v}
   145      log_entry["duration"] = int(round((time.time() - start_time) * 1000))
   146      log_entry["name"] = "minio-py:" + log_entry["name"]
   147      log_entry["method"] = None
   148      print(json.dumps({k: v for k, v in log_entry.items() if v}))
   149      if log_entry["status"] == "FAIL":
   150          raise TestFailed()
   151  
   152  
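        # Each test below receives a log_entry dict from _call_test() and records its
        # arguments under log_entry["args"]; tests that go through _call() (or set
        # log_entry["method"]) also report the API under test in the JSON result line.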
   153  def test_make_bucket_default_region(log_entry):
   154      """Test make_bucket() with default region."""
   155  
   156      # Get a unique bucket_name
   157      bucket_name = _gen_bucket_name()
   158  
   159      log_entry["args"] = {
   160          "bucket_name": bucket_name,
   161          "location": "default value ('{}')".format(AWS_REGION),  # Default location
   162      }
   163  
   164      # Create a bucket with default bucket location
   165      _call(log_entry, _CLIENT.make_bucket, bucket_name)
   166      # Check if bucket was created properly
   167      _call(log_entry, _CLIENT.bucket_exists, bucket_name)
   168      # Remove bucket
   169      _call(log_entry, _CLIENT.remove_bucket, bucket_name)
   170      # Test passes
   171      log_entry["method"] = _CLIENT.make_bucket
   172  
   173  
   174  def test_make_bucket_with_region(log_entry):
   175      """Test make_bucket() with region."""
   176  
   177      # Only test make bucket with region against AWS S3
   178      if not _IS_AWS:
   179          return
   180  
   181      # Get a unique bucket_name
   182      bucket_name = _gen_bucket_name()
   183      # A non-default location
   184      location = "us-west-1"
   185  
   186      log_entry["args"] = {
   187          "bucket_name": bucket_name,
   188          "location": location,
   189      }
   190  
   191      # Create a bucket with default bucket location
   192      _call(log_entry, _CLIENT.make_bucket, bucket_name, location)
   193      # Check if bucket was created properly
   194      _call(log_entry, _CLIENT.bucket_exists, bucket_name)
   195      # Remove bucket
   196      _call(log_entry, _CLIENT.remove_bucket, bucket_name)
   197      # Test passes
   198      log_entry["method"] = _CLIENT.make_bucket
   199  
   200  
   201  def test_negative_make_bucket_invalid_name(log_entry):  # pylint: disable=invalid-name
   202      """Test make_bucket() with invalid bucket name."""
   203  
   204      # Get a unique bucket_name
   205      bucket_name = _gen_bucket_name()
   206      # Default location
   207      log_entry["args"] = {
   208          "location": "default value ('{}')".format(AWS_REGION),
   209      }
   210      # Create an array of invalid bucket names to test
   211      invalid_bucket_name_list = [
   212          bucket_name + ".",
   213          "." + bucket_name,
   214          bucket_name + "...abcd",
   215      ]
   216      for name in invalid_bucket_name_list:
   217          log_entry["args"]["bucket_name"] = name
   218          try:
   219              # Create a bucket with default bucket location
   220              _call(log_entry, _CLIENT.make_bucket, name)
   221              # Check if bucket was created properly
   222              _call(log_entry, _CLIENT.bucket_exists, name)
   223              # Remove bucket
   224              _call(log_entry, _CLIENT.remove_bucket, name)
   225          except ValueError:
   226              pass
   227      # Test passes
   228      log_entry["method"] = _CLIENT.make_bucket
   229      log_entry["args"]["bucket_name"] = invalid_bucket_name_list
   230  
   231  
   232  def test_list_buckets(log_entry):
   233      """Test list_buckets()."""
   234  
   235      # Get a unique bucket_name
   236      bucket_name = _gen_bucket_name()
   237  
   238      # Create a bucket with default bucket location
   239      _call(log_entry, _CLIENT.make_bucket, bucket_name)
   240  
   241      try:
   242          buckets = _CLIENT.list_buckets()
   243          for bucket in buckets:
   244              # bucket object should be of a valid value.
   245              if bucket.name and bucket.creation_date:
   246                  continue
   247              raise ValueError("list_bucket api failure")
   248      finally:
   249          # Remove bucket
   250          _call(log_entry, _CLIENT.remove_bucket, bucket_name)
   251  
   252  
   253  def test_select_object_content(log_entry):
   254      """Test select_object_content()."""
   255  
   256      # Get a unique bucket_name and object_name
   257      bucket_name = _gen_bucket_name()
   258      csvfile = "test.csv"
   259  
   260      log_entry["args"] = {
   261          "bucket_name": bucket_name,
   262          "object_name": csvfile,
   263      }
   264  
   265      try:
   266          _CLIENT.make_bucket(bucket_name)
   267          content = io.BytesIO(b"col1,col2,col3\none,two,three\nX,Y,Z\n")
   268          _CLIENT.put_object(bucket_name, csvfile, content, len(content.getvalue()))
   269  
   270          request = SelectRequest(
   271              "select * from s3object",
   272              CSVInputSerialization(),
   273              CSVOutputSerialization(),
   274              request_progress=True,
   275          )
   276          data = _CLIENT.select_object_content(bucket_name, csvfile, request)
   277          # Get the records
   278          records = io.BytesIO()
   279          for data_bytes in data.stream(16):
   280              records.write(data_bytes)
   281  
   282          expected_crc = crc32(content.getvalue()) & 0xFFFFFFFF
   283          generated_crc = crc32(records.getvalue()) & 0xFFFFFFFF
   284          if expected_crc != generated_crc:
   285              raise ValueError(
   286                  'Data mismatch. Expected: "col1,col2,col3\none,two,three\nX,Y,Z\n", '
   287                  f"received: {records.getvalue().decode()}",
   288              )
   289      finally:
   290          _CLIENT.remove_object(bucket_name, csvfile)
   291          _CLIENT.remove_bucket(bucket_name)
   292  
   293  
   294  def _test_fput_object(bucket_name, object_name, filename, metadata, sse):
   295      """Test fput_object()."""
   296      try:
   297          _CLIENT.make_bucket(bucket_name)
   298          if _IS_AWS:
   299              _CLIENT.fput_object(
   300                  bucket_name, object_name, filename, metadata=metadata, sse=sse
   301              )
   302          else:
   303              _CLIENT.fput_object(bucket_name, object_name, filename, sse=sse)
   304  
   305          _CLIENT.stat_object(bucket_name, object_name, ssec=sse)
   306      finally:
   307          _CLIENT.remove_object(bucket_name, object_name)
   308          _CLIENT.remove_bucket(bucket_name)
   309  
   310  
   311  def test_fput_object_small_file(log_entry, sse=None):
   312      """Test fput_object() with small file."""
   313  
   314      if sse:
   315          log_entry["name"] += "_with_SSE-C"
   316  
   317      # Get a unique bucket_name and object_name
   318      bucket_name = _gen_bucket_name()
   319      object_name = f"{uuid4()}-f"
   320      metadata = {"x-amz-storage-class": "STANDARD_IA"}
   321  
   322      log_entry["args"] = {
   323          "bucket_name": bucket_name,
   324          "object_name": object_name,
   325          "file_path": _TEST_FILE,
   326          "metadata": metadata,
   327      }
   328  
   329      _test_fput_object(bucket_name, object_name, _TEST_FILE, metadata, sse)
   330  
   331  
   332  def test_fput_object_large_file(log_entry, sse=None):
   333      """Test fput_object() with large file."""
   334  
   335      if sse:
   336          log_entry["name"] += "_with_SSE-C"
   337  
   338      # Get a unique bucket_name and object_name
   339      bucket_name = _gen_bucket_name()
   340      object_name = f"{uuid4()}-large"
   341      metadata = {"x-amz-storage-class": "STANDARD_IA"}
   342  
   343      log_entry["args"] = {
   344          "bucket_name": bucket_name,
   345          "object_name": object_name,
   346          "file_path": _LARGE_FILE,
   347          "metadata": metadata,
   348      }
   349  
   350      # upload local large file through multipart.
   351      _test_fput_object(bucket_name, object_name, _LARGE_FILE, metadata, sse)
   352  
   353  
   354  def test_fput_object_with_content_type(log_entry):  # pylint: disable=invalid-name
   355      """Test fput_object() with content-type."""
   356  
   357      # Get a unique bucket_name and object_name
   358      bucket_name = _gen_bucket_name()
   359      object_name = f"{uuid4()}-f"
   360      metadata = {"x-amz-storage-class": "STANDARD_IA"}
   361      content_type = "application/octet-stream"
   362  
   363      log_entry["args"] = {
   364          "bucket_name": bucket_name,
   365          "object_name": object_name,
   366          "file_path": _TEST_FILE,
   367          "metadata": metadata,
   368          "content_type": content_type,
   369      }
   370  
   371      _test_fput_object(bucket_name, object_name, _TEST_FILE, metadata, None)
   372  
   373  
   374  def _validate_stat(st_obj, expected_size, expected_meta, version_id=None):
   375      """Validate stat information."""
   376  
   377      expected_meta = {key.lower(): value for key, value in (expected_meta or {}).items()}
   378      received_etag = st_obj.etag
   379      received_metadata = {
   380          key.lower(): value for key, value in (st_obj.metadata or {}).items()
   381      }
   382      received_content_type = st_obj.content_type
   383      received_size = st_obj.size
   384      received_is_dir = st_obj.is_dir
   385  
   386      if not received_etag:
   387          raise ValueError("No Etag value is returned.")
   388  
   389      if st_obj.version_id != version_id:
   390          raise ValueError(
   391              f"version-id mismatch. expected={version_id}, " f"got={st_obj.version_id}"
   392          )
   393  
   394      # content_type by default can be either application/octet-stream or
   395      # binary/octet-stream
   396      if received_content_type not in ["application/octet-stream", "binary/octet-stream"]:
   397          raise ValueError(
   398              "Incorrect content type. Expected: "
   399              "'application/octet-stream' or 'binary/octet-stream', "
   400              f"received: {received_content_type}"
   401          )
   402  
   403      if received_size != expected_size:
   404          raise ValueError(
   405              f"Incorrect file size. Expected: {expected_size}, "
   406              f"received: {received_size}"
   407          )
   408  
   409      if received_is_dir:
   410          raise ValueError(
   411              "Incorrect file type. Expected: is_dir=False, "
   412              f"received: is_dir={received_is_dir}"
   413          )
   414  
   415      if not all(i in received_metadata.items() for i in expected_meta.items()):
   416          raise ValueError("Expected metadata not found in object metadata")
   417  
   418  
   419  def test_copy_object_no_copy_condition(  # pylint: disable=invalid-name
   420      log_entry, ssec_copy=None, ssec=None
   421  ):
   422      """Test copy_object() with no conditions."""
   423  
   424      if ssec_copy or ssec:
   425          log_entry["name"] += "_SSEC"
   426  
   427      # Get a unique bucket_name and object_name
   428      bucket_name = _gen_bucket_name()
   429      object_name = f"{uuid4()}"
   430      object_source = object_name + "-source"
   431      object_copy = object_name + "-copy"
   432  
   433      log_entry["args"] = {
   434          "bucket_name": bucket_name,
   435          "object_source": object_source,
   436          "object_name": object_copy,
   437      }
   438  
   439      try:
   440          _CLIENT.make_bucket(bucket_name)
   441          # Upload a streaming object of 1 KiB
   442          size = 1 * KB
   443          reader = LimitedRandomReader(size)
   444          _CLIENT.put_object(bucket_name, object_source, reader, size, sse=ssec)
   445          _CLIENT.copy_object(
   446              bucket_name,
   447              object_copy,
   448              sse=ssec,
   449              source=CopySource(bucket_name, object_source, ssec=ssec_copy),
   450          )
   451          st_obj = _CLIENT.stat_object(bucket_name, object_copy, ssec=ssec)
   452          _validate_stat(st_obj, size, {})
   453      finally:
   454          _CLIENT.remove_object(bucket_name, object_source)
   455          _CLIENT.remove_object(bucket_name, object_copy)
   456          _CLIENT.remove_bucket(bucket_name)
   457  
   458  
   459  def test_copy_object_with_metadata(log_entry):
   460      """Test copy_object() with metadata."""
   461  
   462      # Get a unique bucket_name and object_name
   463      bucket_name = _gen_bucket_name()
   464      object_name = f"{uuid4()}"
   465      object_source = object_name + "-source"
   466      object_copy = object_name + "-copy"
   467      metadata = {
   468          "testing-string": "string",
   469          "testing-int": 1,
   470          10: "value",
   471      }
   472  
   473      log_entry["args"] = {
   474          "bucket_name": bucket_name,
   475          "object_source": object_source,
   476          "object_name": object_copy,
   477          "metadata": metadata,
   478      }
   479  
   480      try:
   481          _CLIENT.make_bucket(bucket_name)
   482          # Upload a streaming object of 1 KiB
   483          size = 1 * KB
   484          reader = LimitedRandomReader(size)
   485          _CLIENT.put_object(bucket_name, object_source, reader, size)
   486          # Perform a server side copy of an object
   487          _CLIENT.copy_object(
   488              bucket_name,
   489              object_copy,
   490              CopySource(bucket_name, object_source),
   491              metadata=metadata,
   492              metadata_directive=REPLACE,
   493          )
   494          # Verification
   495          st_obj = _CLIENT.stat_object(bucket_name, object_copy)
   496          expected_metadata = {
   497              "x-amz-meta-testing-int": "1",
   498              "x-amz-meta-testing-string": "string",
   499              "x-amz-meta-10": "value",
   500          }
   501          _validate_stat(st_obj, size, expected_metadata)
   502      finally:
   503          _CLIENT.remove_object(bucket_name, object_source)
   504          _CLIENT.remove_object(bucket_name, object_copy)
   505          _CLIENT.remove_bucket(bucket_name)
   506  
   507  
   508  def test_copy_object_etag_match(log_entry):
   509      """Test copy_object() with etag match condition."""
   510  
   511      # Get a unique bucket_name and object_name
   512      bucket_name = _gen_bucket_name()
   513      object_name = f"{uuid4()}"
   514      object_source = object_name + "-source"
   515      object_copy = object_name + "-copy"
   516  
   517      log_entry["args"] = {
   518          "bucket_name": bucket_name,
   519          "object_source": object_source,
   520          "object_name": object_copy,
   521      }
   522  
   523      try:
   524          _CLIENT.make_bucket(bucket_name)
   525          # Upload a streaming object of 1 KiB
   526          size = 1 * KB
   527          reader = LimitedRandomReader(size)
   528          _CLIENT.put_object(bucket_name, object_source, reader, size)
   529          # Perform a server side copy of an object
   530          _CLIENT.copy_object(
   531              bucket_name,
   532              object_copy,
   533              CopySource(bucket_name, object_source),
   534          )
   535          # Verification
   536          source_etag = _CLIENT.stat_object(bucket_name, object_source).etag
   537          log_entry["args"]["conditions"] = {"set_match_etag": source_etag}
   538          _CLIENT.copy_object(
   539              bucket_name,
   540              object_copy,
   541              CopySource(bucket_name, object_source, match_etag=source_etag),
   542          )
   543      finally:
   544          _CLIENT.remove_object(bucket_name, object_source)
   545          _CLIENT.remove_object(bucket_name, object_copy)
   546          _CLIENT.remove_bucket(bucket_name)
   547  
   548  
   549  def test_copy_object_negative_etag_match(log_entry):  # pylint: disable=invalid-name
   550      """Test copy_object() with etag not match condition."""
   551  
   552      # Get a unique bucket_name and object_name
   553      bucket_name = _gen_bucket_name()
   554      object_name = f"{uuid4()}"
   555      object_source = object_name + "-source"
   556      object_copy = object_name + "-copy"
   557  
   558      log_entry["args"] = {
   559          "bucket_name": bucket_name,
   560          "object_source": object_source,
   561          "object_name": object_copy,
   562      }
   563  
   564      try:
   565          _CLIENT.make_bucket(bucket_name)
   566          # Upload a streaming object of 1 KiB
   567          size = 1 * KB
   568          reader = LimitedRandomReader(size)
   569          _CLIENT.put_object(bucket_name, object_source, reader, size)
   570          try:
   571              # Perform a server side copy of an object
   572              # with incorrect pre-conditions and fail
   573              etag = "test-etag"
   574              log_entry["args"]["conditions"] = {"set_match_etag": etag}
   575              _CLIENT.copy_object(
   576                  bucket_name,
   577                  object_copy,
   578                  CopySource(bucket_name, object_source, match_etag=etag),
   579              )
   580          except S3Error as exc:
   581              if exc.code != "PreconditionFailed":
   582                  raise
   583      finally:
   584          _CLIENT.remove_object(bucket_name, object_source)
   585          _CLIENT.remove_object(bucket_name, object_copy)
   586          _CLIENT.remove_bucket(bucket_name)
   587  
   588  
   589  def test_copy_object_modified_since(log_entry):
   590      """Test copy_object() with modified since condition."""
   591  
   592      # Get a unique bucket_name and object_name
   593      bucket_name = _gen_bucket_name()
   594      object_name = f"{uuid4()}"
   595      object_source = object_name + "-source"
   596      object_copy = object_name + "-copy"
   597  
   598      log_entry["args"] = {
   599          "bucket_name": bucket_name,
   600          "object_source": object_source,
   601          "object_name": object_copy,
   602      }
   603  
   604      try:
   605          _CLIENT.make_bucket(bucket_name)
   606          # Upload a streaming object of 1 KiB
   607          size = 1 * KB
   608          reader = LimitedRandomReader(size)
   609          _CLIENT.put_object(bucket_name, object_source, reader, size)
   610          # Set up the 'modified_since' copy condition
   611          mod_since = datetime(2014, 4, 1, tzinfo=timezone.utc)
   612          log_entry["args"]["conditions"] = {
   613              "set_modified_since": to_http_header(mod_since)
   614          }
   615          # Perform a server side copy of an object
   616          # and expect the copy to complete successfully
   617          _CLIENT.copy_object(
   618              bucket_name,
   619              object_copy,
   620              CopySource(bucket_name, object_source, modified_since=mod_since),
   621          )
   622      finally:
   623          _CLIENT.remove_object(bucket_name, object_source)
   624          _CLIENT.remove_object(bucket_name, object_copy)
   625          _CLIENT.remove_bucket(bucket_name)
   626  
   627  
   628  def test_copy_object_unmodified_since(log_entry):  # pylint: disable=invalid-name
   629      """Test copy_object() with unmodified since condition."""
   630  
   631      # Get a unique bucket_name and object_name
   632      bucket_name = _gen_bucket_name()
   633      object_name = f"{uuid4()}"
   634      object_source = object_name + "-source"
   635      object_copy = object_name + "-copy"
   636  
   637      log_entry["args"] = {
   638          "bucket_name": bucket_name,
   639          "object_source": object_source,
   640          "object_name": object_copy,
   641      }
   642  
   643      try:
   644          _CLIENT.make_bucket(bucket_name)
   645          # Upload a streaming object of 1 KiB
   646          size = 1 * KB
   647          reader = LimitedRandomReader(size)
   648          _CLIENT.put_object(bucket_name, object_source, reader, size)
   649          # Set up the 'unmodified_since' copy condition
   650          unmod_since = datetime(2014, 4, 1, tzinfo=timezone.utc)
   651          log_entry["args"]["conditions"] = {
   652              "set_unmodified_since": to_http_header(unmod_since)
   653          }
   654          try:
   655              # Perform a server side copy of an object and expect
   656              # the copy to fail, since the object was created/modified just now,
   657              # well after the unmodified-since time of April 1st, 2014
   658              _CLIENT.copy_object(
   659                  bucket_name,
   660                  object_copy,
   661                  CopySource(
   662                      bucket_name,
   663                      object_source,
   664                      unmodified_since=unmod_since,
   665                  ),
   666              )
   667          except S3Error as exc:
   668              if exc.code != "PreconditionFailed":
   669                  raise
   670      finally:
   671          _CLIENT.remove_object(bucket_name, object_source)
   672          _CLIENT.remove_object(bucket_name, object_copy)
   673          _CLIENT.remove_bucket(bucket_name)
   674  
   675  
   676  def test_put_object(log_entry, sse=None):
   677      """Test put_object()."""
   678  
   679      if sse:
   680          log_entry["name"] += "_SSE"
   681  
   682      # Get a unique bucket_name and object_name
   683      bucket_name = _gen_bucket_name()
   684      object_name = f"{uuid4()}"
   685      length = 1 * MB
   686  
   687      log_entry["args"] = {
   688          "bucket_name": bucket_name,
   689          "object_name": object_name,
   690          "length": length,
   691          "data": "LimitedRandomReader(1 * MB)",
   692      }
   693  
   694      try:
   695          _CLIENT.make_bucket(bucket_name)
   696          # Put/Upload a streaming object of 1 MiB
   697          reader = LimitedRandomReader(length)
   698          _CLIENT.put_object(bucket_name, object_name, reader, length, sse=sse)
   699          _CLIENT.stat_object(bucket_name, object_name, ssec=sse)
   700  
   701          # Put/Upload a streaming object of 11 MiB
   702          log_entry["args"]["length"] = length = 11 * MB
   703          reader = LimitedRandomReader(length)
   704          log_entry["args"]["data"] = "LimitedRandomReader(11 * MB)"
   705          log_entry["args"]["metadata"] = metadata = {
   706              "x-amz-meta-testing": "value",
   707              "test-key": "value2",
   708          }
   709          log_entry["args"]["content_type"] = content_type = "application/octet-stream"
   710          log_entry["args"]["object_name"] = object_name + "-metadata"
   711          _CLIENT.put_object(
   712              bucket_name,
   713              object_name + "-metadata",
   714              reader,
   715              length,
   716              content_type,
   717              metadata,
   718              sse=sse,
   719          )
   720          # Stat on the uploaded object to check if it exists
   721          # Fetch saved stat metadata on a previously uploaded object with
   722          # metadata.
   723          st_obj = _CLIENT.stat_object(bucket_name, object_name + "-metadata", ssec=sse)
   724          normalized_meta = {
   725              key.lower(): value for key, value in (st_obj.metadata or {}).items()
   726          }
   727          if "x-amz-meta-testing" not in normalized_meta:
   728              raise ValueError("Metadata key 'x-amz-meta-testing' not found")
   729          value = normalized_meta["x-amz-meta-testing"]
   730          if value != "value":
   731              raise ValueError(f"Metadata key has unexpected value {value}")
   732          if "x-amz-meta-test-key" not in normalized_meta:
   733              raise ValueError("Metadata key 'x-amz-meta-test-key' not found")
   734      finally:
   735          _CLIENT.remove_object(bucket_name, object_name)
   736          _CLIENT.remove_object(bucket_name, object_name + "-metadata")
   737          _CLIENT.remove_bucket(bucket_name)
   738  
   739  
   740  def test_negative_put_object_with_path_segment(  # pylint: disable=invalid-name
   741      log_entry,
   742  ):
   743      """Test put_object() failure with path segment."""
   744  
   745      # Get a unique bucket_name and object_name
   746      bucket_name = _gen_bucket_name()
   747      object_name = f"/a/b/c/{uuid4()}"
   748      length = 0
   749  
   750      log_entry["args"] = {
   751          "bucket_name": bucket_name,
   752          "object_name": object_name,
   753          "length": length,
   754          "data": "",
   755      }
   756  
   757      try:
   758          _CLIENT.make_bucket(bucket_name)
   759          _CLIENT.put_object(bucket_name, object_name, io.BytesIO(b""), 0)
   760          _CLIENT.remove_object(bucket_name, object_name)
   761      except S3Error as err:
   762          if err.code != "XMinioInvalidObjectName":
   763              raise
   764      finally:
   765          _CLIENT.remove_bucket(bucket_name)
   766  
   767  
   768  def _test_stat_object(log_entry, sse=None, version_check=False):
   769      """Test stat_object()."""
   770  
   771      if sse:
   772          log_entry["name"] += "_SSEC"
   773  
   774      # Get a unique bucket_name and object_name
   775      bucket_name = _gen_bucket_name()
   776      object_name = f"{uuid4()}"
   777      length = 1 * MB
   778  
   779      log_entry["args"] = {
   780          "bucket_name": bucket_name,
   781          "object_name": object_name,
   782          "length": length,
   783          "data": "LimitedRandomReader(1 * MB)",
   784      }
   785  
   786      version_id1 = None
   787      version_id2 = None
   788  
   789      _CLIENT.make_bucket(bucket_name)
   790      try:
   791          if version_check:
   792              _CLIENT.set_bucket_versioning(
   793                  bucket_name,
   794                  VersioningConfig(ENABLED),
   795              )
   796          # Put/Upload a streaming object of 1 MiB
   797          reader = LimitedRandomReader(length)
   798          result = _CLIENT.put_object(
   799              bucket_name,
   800              object_name,
   801              reader,
   802              length,
   803              sse=sse,
   804          )
   805          version_id1 = result.version_id
   806          _CLIENT.stat_object(
   807              bucket_name,
   808              object_name,
   809              ssec=sse,
   810              version_id=version_id1,
   811          )
   812  
   813          # Put/Upload a streaming object of 11 MiB
   814          log_entry["args"]["length"] = length = 11 * MB
   815          reader = LimitedRandomReader(length)
   816          log_entry["args"]["data"] = "LimitedRandomReader(11 * MB)"
   817          log_entry["args"]["metadata"] = metadata = {"X-Amz-Meta-Testing": "value"}
   818          log_entry["args"]["content_type"] = content_type = "application/octet-stream"
   819          log_entry["args"]["object_name"] = object_name + "-metadata"
   820          result = _CLIENT.put_object(
   821              bucket_name,
   822              object_name + "-metadata",
   823              reader,
   824              length,
   825              content_type,
   826              metadata,
   827              sse=sse,
   828          )
   829          version_id2 = result.version_id
   830          # Stat on the uploaded object to check if it exists
   831          # Fetch saved stat metadata on a previously uploaded object with
   832          # metadata.
   833          st_obj = _CLIENT.stat_object(
   834              bucket_name,
   835              object_name + "-metadata",
   836              ssec=sse,
   837              version_id=version_id2,
   838          )
   839          # Verify the collected stat data.
   840          _validate_stat(
   841              st_obj,
   842              length,
   843              metadata,
   844              version_id=version_id2,
   845          )
   846      finally:
   847          _CLIENT.remove_object(bucket_name, object_name, version_id=version_id1)
   848          _CLIENT.remove_object(
   849              bucket_name,
   850              object_name + "-metadata",
   851              version_id=version_id2,
   852          )
   853          _CLIENT.remove_bucket(bucket_name)
   854  
   855  
   856  def test_stat_object(log_entry, sse=None):
   857      """Test stat_object()."""
   858      _test_stat_object(log_entry, sse)
   859  
   860  
   861  def test_stat_object_version(log_entry, sse=None):
   862      """Test stat_object() of versioned object."""
   863      _test_stat_object(log_entry, sse, version_check=True)
   864  
   865  
   866  def _test_remove_object(log_entry, version_check=False):
   867      """Test remove_object()."""
   868  
   869      # Get a unique bucket_name and object_name
   870      bucket_name = _gen_bucket_name()
   871      object_name = f"{uuid4()}"
   872      length = 1 * KB
   873  
   874      log_entry["args"] = {
   875          "bucket_name": bucket_name,
   876          "object_name": object_name,
   877      }
   878  
   879      _CLIENT.make_bucket(bucket_name)
   880      try:
   881          if version_check:
   882              _CLIENT.set_bucket_versioning(
   883                  bucket_name,
   884                  VersioningConfig(ENABLED),
   885              )
   886          result = _CLIENT.put_object(
   887              bucket_name,
   888              object_name,
   889              LimitedRandomReader(length),
   890              length,
   891          )
   892          _CLIENT.remove_object(
   893              bucket_name,
   894              object_name,
   895              version_id=result.version_id,
   896          )
   897      finally:
   898          _CLIENT.remove_bucket(bucket_name)
   899  
   900  
   901  def test_remove_object(log_entry):
   902      """Test remove_object()."""
   903      _test_remove_object(log_entry)
   904  
   905  
   906  def test_remove_object_version(log_entry):
   907      """Test remove_object() of versioned object."""
   908      _test_remove_object(log_entry, version_check=True)
   909  
   910  
   911  def _test_get_object(log_entry, sse=None, version_check=False):
   912      """Test get_object()."""
   913  
   914      if sse:
   915          log_entry["name"] += "_SSEC"
   916  
   917      # Get a unique bucket_name and object_name
   918      bucket_name = _gen_bucket_name()
   919      object_name = f"{uuid4()}"
   920      length = 1 * MB
   921  
   922      log_entry["args"] = {
   923          "bucket_name": bucket_name,
   924          "object_name": object_name,
   925      }
   926  
   927      _CLIENT.make_bucket(bucket_name)
   928      version_id = None
   929      try:
   930          if version_check:
   931              _CLIENT.set_bucket_versioning(
   932                  bucket_name,
   933                  VersioningConfig(ENABLED),
   934              )
   935          result = _CLIENT.put_object(
   936              bucket_name,
   937              object_name,
   938              LimitedRandomReader(length),
   939              length,
   940              sse=sse,
   941          )
   942          version_id = result.version_id
   943          # Get/Download a full object, iterate on response to save to disk
   944          object_data = _CLIENT.get_object(
   945              bucket_name,
   946              object_name,
   947              ssec=sse,
   948              version_id=version_id,
   949          )
   950          newfile = "newfile جديد"
   951          with open(newfile, "wb") as file_data:
   952              shutil.copyfileobj(object_data, file_data)
   953          os.remove(newfile)
   954      finally:
   955          _CLIENT.remove_object(bucket_name, object_name, version_id=version_id)
   956          _CLIENT.remove_bucket(bucket_name)
   957  
   958  
   959  def test_get_object(log_entry, sse=None):
   960      """Test get_object()."""
   961      _test_get_object(log_entry, sse)
   962  
   963  
   964  def test_get_object_version(log_entry, sse=None):
   965      """Test get_object() for versioned object."""
   966      _test_get_object(log_entry, sse, version_check=True)
   967  
   968  
   969  def _test_fget_object(log_entry, sse=None, version_check=False):
   970      """Test fget_object()."""
   971  
   972      if sse:
   973          log_entry["name"] += "_SSEC"
   974  
   975      # Get a unique bucket_name and object_name
   976      bucket_name = _gen_bucket_name()
   977      object_name = f"{uuid4()}"
   978      tmpfd, tmpfile = tempfile.mkstemp()
   979      os.close(tmpfd)
   980      length = 1 * MB
   981  
   982      log_entry["args"] = {
   983          "bucket_name": bucket_name,
   984          "object_name": object_name,
   985          "file_path": tmpfile,
   986      }
   987  
   988      _CLIENT.make_bucket(bucket_name)
   989      version_id = None
   990      try:
   991          if version_check:
   992              _CLIENT.set_bucket_versioning(
   993                  bucket_name,
   994                  VersioningConfig(ENABLED),
   995              )
   996          result = _CLIENT.put_object(
   997              bucket_name,
   998              object_name,
   999              LimitedRandomReader(length),
  1000              length,
  1001              sse=sse,
  1002          )
  1003          version_id = result.version_id
  1004          # Get/Download a full object and save locally at path
  1005          _CLIENT.fget_object(
  1006              bucket_name,
  1007              object_name,
  1008              tmpfile,
  1009              ssec=sse,
  1010              version_id=version_id,
  1011          )
  1012          os.remove(tmpfile)
  1013      finally:
  1014          _CLIENT.remove_object(bucket_name, object_name, version_id=version_id)
  1015          _CLIENT.remove_bucket(bucket_name)
  1016  
  1017  
  1018  def test_fget_object(log_entry, sse=None):
  1019      """Test fget_object()."""
  1020      _test_fget_object(log_entry, sse)
  1021  
  1022  
  1023  def test_fget_object_version(log_entry, sse=None):
  1024      """Test fget_object() of versioned object."""
  1025      _test_fget_object(log_entry, sse, version_check=True)
  1026  
  1027  
  1028  def test_get_object_with_default_length(  # pylint: disable=invalid-name
  1029      log_entry, sse=None
  1030  ):
  1031      """Test get_object() with default length."""
  1032  
  1033      if sse:
  1034          log_entry["name"] += "_SSEC"
  1035  
  1036      # Get a unique bucket_name and object_name
  1037      bucket_name = _gen_bucket_name()
  1038      object_name = f"{uuid4()}"
  1039      size = 1 * MB
  1040      length = 1000
  1041      offset = size - length
  1042  
  1043      log_entry["args"] = {
  1044          "bucket_name": bucket_name,
  1045          "object_name": object_name,
  1046          "offset": offset,
  1047      }
  1048  
  1049      _CLIENT.make_bucket(bucket_name)
  1050      try:
  1051          _CLIENT.put_object(
  1052              bucket_name, object_name, LimitedRandomReader(size), size, sse=sse
  1053          )
  1054          # Get the object from the given offset to the end (default length)
  1055          object_data = _CLIENT.get_object(
  1056              bucket_name, object_name, offset=offset, ssec=sse
  1057          )
  1058          newfile = "newfile"
  1059          with open(newfile, "wb") as file_data:
  1060              for data in object_data:
  1061                  file_data.write(data)
  1062          # Check if the new file is the right size
  1063          new_file_size = os.path.getsize(newfile)
  1064          os.remove(newfile)
  1065          if new_file_size != length:
  1066              raise ValueError(f"Unexpected file size. Expected: {length}, received: {new_file_size}")
  1067      finally:
  1068          _CLIENT.remove_object(bucket_name, object_name)
  1069          _CLIENT.remove_bucket(bucket_name)
  1070  
  1071  
  1072  def test_get_partial_object(log_entry, sse=None):
  1073      """Test get_object() by offset/length."""
  1074  
  1075      if sse:
  1076          log_entry["name"] += "_SSEC"
  1077  
  1078      # Get a unique bucket_name and object_name
  1079      bucket_name = _gen_bucket_name()
  1080      object_name = f"{uuid4()}"
  1081      size = 1 * MB
  1082      offset = int(size / 2)
  1083      length = offset - 1000
  1084  
  1085      log_entry["args"] = {
  1086          "bucket_name": bucket_name,
  1087          "object_name": object_name,
  1088          "offset": offset,
  1089      }
  1090  
  1091      _CLIENT.make_bucket(bucket_name)
  1092      try:
  1093          _CLIENT.put_object(
  1094              bucket_name, object_name, LimitedRandomReader(size), size, sse=sse
  1095          )
  1096          # Get a partial object using offset and length
  1097          object_data = _CLIENT.get_object(
  1098              bucket_name, object_name, offset=offset, length=length, ssec=sse
  1099          )
  1100          newfile = "newfile"
  1101          with open(newfile, "wb") as file_data:
  1102              for data in object_data:
  1103                  file_data.write(data)
  1104          # Check if the new file is the right size
  1105          new_file_size = os.path.getsize(newfile)
  1106          os.remove(newfile)
  1107          if new_file_size != length:
  1108              raise ValueError(f"Unexpected file size. Expected: {length}, received: {new_file_size}")
  1109      finally:
  1110          _CLIENT.remove_object(bucket_name, object_name)
  1111          _CLIENT.remove_bucket(bucket_name)
  1112  
  1113  
  1114  def _test_list_objects(log_entry, use_api_v1=False, version_check=False):
  1115      """Test list_objects()."""
  1116  
  1117      # Get a unique bucket_name and object_name
  1118      bucket_name = _gen_bucket_name()
  1119      object_name = f"{uuid4()}"
  1120      is_recursive = True
  1121  
  1122      log_entry["args"] = {
  1123          "bucket_name": bucket_name,
  1124          "object_name": object_name,
  1125          "recursive": is_recursive,
  1126      }
  1127  
  1128      _CLIENT.make_bucket(bucket_name)
  1129      version_id1 = None
  1130      version_id2 = None
  1131      try:
  1132          if version_check:
  1133              _CLIENT.set_bucket_versioning(
  1134                  bucket_name,
  1135                  VersioningConfig(ENABLED),
  1136              )
  1137          size = 1 * KB
  1138          result = _CLIENT.put_object(
  1139              bucket_name,
  1140              object_name + "-1",
  1141              LimitedRandomReader(size),
  1142              size,
  1143          )
  1144          version_id1 = result.version_id
  1145          result = _CLIENT.put_object(
  1146              bucket_name,
  1147              object_name + "-2",
  1148              LimitedRandomReader(size),
  1149              size,
  1150          )
  1151          version_id2 = result.version_id
  1152          # List all object paths in bucket.
  1153          objects = _CLIENT.list_objects(
  1154              bucket_name,
  1155              "",
  1156              is_recursive,
  1157              include_version=version_check,
  1158              use_api_v1=use_api_v1,
  1159          )
  1160          for obj in objects:
  1161              _ = (
  1162                  obj.bucket_name,
  1163                  obj.object_name,
  1164                  obj.last_modified,
  1165                  obj.etag,
  1166                  obj.size,
  1167                  obj.content_type,
  1168              )
  1169              if obj.version_id not in [version_id1, version_id2]:
  1170                  raise ValueError(
  1171                      f"version ID mismatch. "
  1172                      f"expected=any{[version_id1, version_id2]}, "
  1173                      f"got:{obj.version_id}"
  1174                  )
  1175      finally:
  1176          _CLIENT.remove_object(
  1177              bucket_name,
  1178              object_name + "-1",
  1179              version_id=version_id1,
  1180          )
  1181          _CLIENT.remove_object(
  1182              bucket_name,
  1183              object_name + "-2",
  1184              version_id=version_id2,
  1185          )
  1186          _CLIENT.remove_bucket(bucket_name)
  1187  
  1188  
  1189  def test_list_objects_v1(log_entry):
  1190      """Test list_objects() with V1 API."""
  1191      _test_list_objects(log_entry, use_api_v1=True)
  1192  
  1193  
  1194  def test_list_object_v1_versions(log_entry):
  1195      """Test list_objects() of versioned objects with V1 API."""
  1196      _test_list_objects(log_entry, use_api_v1=True, version_check=True)
  1197  
  1198  
  1199  def _test_list_objects_api(bucket_name, expected_no, *argv):
  1200      """Check that list_objects() returns the expected number of entries."""
  1201  
  1202      # argv carries the optional prefix and recursive arguments of the
  1203      # list_objects API; prefix is a string, recursive a truthy value.
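            # For example, test_list_objects_with_prefix() below exercises calls such as
            #     _test_list_objects_api(bucket_name, 2, "0/")          # non-recursive
            #     _test_list_objects_api(bucket_name, 3, "0/", "True")  # recursive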
  1204      objects = _CLIENT.list_objects(bucket_name, *argv)
  1205  
  1206      # expect all objects to be listed
  1207      no_of_files = 0
  1208      for obj in objects:
  1209          _ = (
  1210              obj.bucket_name,
  1211              obj.object_name,
  1212              obj.last_modified,
  1213              obj.etag,
  1214              obj.size,
  1215              obj.content_type,
  1216          )
  1217          no_of_files += 1
  1218  
  1219      if expected_no != no_of_files:
  1220          raise ValueError(
  1221              f"Listed no of objects ({no_of_files}), does not match the "
  1222              f"expected no of objects ({expected_no})"
  1223          )
  1224  
  1225  
  1226  def test_list_objects_with_prefix(log_entry):
  1227      """Test list_objects() with prefix."""
  1228  
  1229      # Get a unique bucket_name and object_name
  1230      bucket_name = _gen_bucket_name()
  1231      object_name = f"{uuid4()}"
  1232  
  1233      log_entry["args"] = {
  1234          "bucket_name": bucket_name,
  1235          "object_name": object_name,
  1236      }
  1237  
  1238      _CLIENT.make_bucket(bucket_name)
  1239      try:
  1240          size = 1 * KB
  1241          no_of_created_files = 4
  1242          path_prefix = ""
  1243          # Create files and directories
  1244          for i in range(no_of_created_files):
  1245              _CLIENT.put_object(
  1246                  bucket_name,
  1247                  f"{path_prefix}{i}_{object_name}",
  1248                  LimitedRandomReader(size),
  1249                  size,
  1250              )
  1251              path_prefix = f"{path_prefix}{i}/"
  1252  
  1253          # Created files and directory structure
  1254          # ._<bucket_name>/
  1255          # |___0_<object_name>
  1256          # |___0/
  1257          #     |___1_<object_name>
  1258          #     |___1/
  1259          #         |___2_<object_name>
  1260          #         |___2/
  1261          #             |___3_<object_name>
  1262          #
  1263  
  1264          # Test and verify list_objects api outputs
  1265          # List objects recursively with NO prefix
  1266          log_entry["args"]["prefix"] = prefix = ""  # no prefix
  1267          log_entry["args"]["recursive"] = recursive = ""
  1268          _test_list_objects_api(bucket_name, no_of_created_files, prefix, True)
  1269  
  1270          # List objects at the top level with no prefix and no recursive option
  1271          # Expect only the top 2 objects to be listed
  1272          _test_list_objects_api(bucket_name, 2)
  1273  
  1274          # List objects for '0' directory/prefix without recursive option
  1275          # Expect 2 objects (the '0/' prefix and the '0_' object) to be listed
  1276          log_entry["args"]["prefix"] = prefix = "0"
  1277          _test_list_objects_api(bucket_name, 2, prefix)
  1278  
  1279          # List objects for '0/' directory/prefix without recursive option
  1280          # Expect only 2 objects under directory '0/' to be listed,
  1281          # non-recursive
  1282          log_entry["args"]["prefix"] = prefix = "0/"
  1283          _test_list_objects_api(bucket_name, 2, prefix)
  1284  
  1285          # List objects for '0/' directory/prefix, recursively
  1286          # Expect all 3 objects under '0/' to be listed
  1287          log_entry["args"]["prefix"] = prefix = "0/"
  1288          log_entry["args"]["recursive"] = recursive = "True"
  1289          _test_list_objects_api(bucket_name, 3, prefix, recursive)
  1290  
  1291          # List object with '0/1/2/' directory/prefix, non-recursive
  1292          # Expect the single object under directory '0/1/2/' to be listed
  1293          log_entry["args"]["prefix"] = prefix = "0/1/2/"
  1294          _test_list_objects_api(bucket_name, 1, prefix)
  1295      finally:
  1296          path_prefix = ""
  1297          for i in range(no_of_created_files):
  1298              _CLIENT.remove_object(
  1299                  bucket_name,
  1300                  f"{path_prefix}{i}_{object_name}",
  1301              )
  1302              path_prefix = f"{path_prefix}{i}/"
  1303          _CLIENT.remove_bucket(bucket_name)
  1304      # Test passes
  1305      log_entry["args"]["prefix"] = "Several prefix/recursive combinations are tested"
  1306      log_entry["args"]["recursive"] = "Several prefix/recursive combinations are tested"
  1307  
  1308  
  1309  def test_list_objects_with_1001_files(log_entry):  # pylint: disable=invalid-name
  1310      """Test list_objects() with more than 1000 objects."""
  1311  
  1312      # Get a unique bucket_name and object_name
  1313      bucket_name = _gen_bucket_name()
  1314      object_name = f"{uuid4()}"
  1315  
  1316      log_entry["args"] = {
  1317          "bucket_name": bucket_name,
  1318          "object_name": f"{object_name}_0 ~ {object_name}_1999",
  1319      }
  1320  
  1321      _CLIENT.make_bucket(bucket_name)
  1322      try:
  1323          size = 1 * KB
  1324          no_of_created_files = 2000
  1325          # Create files and directories
  1326          for i in range(no_of_created_files):
  1327              _CLIENT.put_object(
  1328                  bucket_name, f"{object_name}_{i}", LimitedRandomReader(size), size
  1329              )
  1330  
  1331          # List objects and check that all created objects are returned
  1332          _test_list_objects_api(bucket_name, no_of_created_files)
  1333      finally:
  1334          for i in range(no_of_created_files):
  1335              _CLIENT.remove_object(bucket_name, f"{object_name}_{i}")
  1336          _CLIENT.remove_bucket(bucket_name)
  1337  
  1338  
  1339  def test_list_objects(log_entry):
  1340      """Test list_objects()."""
  1341      _test_list_objects(log_entry)
  1342  
  1343  
  1344  def test_list_object_versions(log_entry):
  1345      """Test list_objects() of versioned object."""
  1346      _test_list_objects(log_entry, version_check=True)
  1347  
  1348  
  1349  def test_presigned_get_object_default_expiry(log_entry):  # pylint: disable=invalid-name
  1350      """Test presigned_get_object() with default expiry."""
  1351  
  1352      # Get a unique bucket_name and object_name
  1353      bucket_name = _gen_bucket_name()
  1354      object_name = f"{uuid4()}"
  1355  
  1356      log_entry["args"] = {
  1357          "bucket_name": bucket_name,
  1358          "object_name": object_name,
  1359      }
  1360  
  1361      _CLIENT.make_bucket(bucket_name)
  1362      try:
  1363          size = 1 * KB
  1364          _CLIENT.put_object(bucket_name, object_name, LimitedRandomReader(size), size)
  1365          presigned_get_object_url = _CLIENT.presigned_get_object(
  1366              bucket_name, object_name
  1367          )
  1368          response = HTTP.urlopen("GET", presigned_get_object_url)
  1369          if response.status != 200:
  1370              raise Exception(
  1371                  f"Presigned GET object URL {presigned_get_object_url} failed; "
  1372                  f"code: {response.status}, error: {response.data}"
  1373              )
  1374      finally:
  1375          _CLIENT.remove_object(bucket_name, object_name)
  1376          _CLIENT.remove_bucket(bucket_name)
  1377  
  1378  
  1379  def test_presigned_get_object_expiry(log_entry):  # pylint: disable=invalid-name
  1380      """Test presigned_get_object() with expiry."""
  1381  
  1382      # Get a unique bucket_name and object_name
  1383      bucket_name = _gen_bucket_name()
  1384      object_name = f"{uuid4()}"
  1385  
  1386      log_entry["args"] = {
  1387          "bucket_name": bucket_name,
  1388          "object_name": object_name,
  1389      }
  1390  
  1391      _CLIENT.make_bucket(bucket_name)
  1392      try:
  1393          size = 1 * KB
  1394          _CLIENT.put_object(bucket_name, object_name, LimitedRandomReader(size), size)
  1395          presigned_get_object_url = _CLIENT.presigned_get_object(
  1396              bucket_name, object_name, timedelta(seconds=120)
  1397          )
  1398          response = HTTP.urlopen("GET", presigned_get_object_url)
  1399          if response.status != 200:
  1400              raise Exception(
  1401                  f"Presigned GET object URL {presigned_get_object_url} failed; "
  1402                  f"code: {response.status}, error: {response.data}"
  1403              )
  1404  
  1405          log_entry["args"]["presigned_get_object_url"] = presigned_get_object_url
  1406  
  1407          response = HTTP.urlopen("GET", presigned_get_object_url)
  1408  
  1409          log_entry["args"]["response.status"] = response.status
  1410          log_entry["args"]["response.reason"] = response.reason
  1411          log_entry["args"]["response.headers"] = json.dumps(response.headers.__dict__)
  1412          # pylint: disable=protected-access
  1413          log_entry["args"]["response._body"] = response._body.decode("utf-8")
  1414  
  1415          if response.status != 200:
  1416              raise Exception(
  1417                  f"Presigned GET object URL {presigned_get_object_url} failed; "
  1418                  f"code: {response.status}, error: {response.data}"
  1419              )
  1420  
  1421          presigned_get_object_url = _CLIENT.presigned_get_object(
  1422              bucket_name, object_name, timedelta(seconds=1)
  1423          )
  1424  
  1425          # Wait for 2 seconds for the presigned url to expire
  1426          time.sleep(2)
  1427          response = HTTP.urlopen("GET", presigned_get_object_url)
  1428  
  1429          log_entry["args"]["response.status-2"] = response.status
  1430          log_entry["args"]["response.reason-2"] = response.reason
  1431          log_entry["args"]["response.headers-2"] = json.dumps(response.headers.__dict__)
  1432          log_entry["args"]["response._body-2"] = response._body.decode("utf-8")
  1433  
  1434          # Success with an expired url is considered to be a failure
  1435          if response.status == 200:
  1436              raise ValueError("Presigned get url failed to expire!")
  1437      finally:
  1438          _CLIENT.remove_object(bucket_name, object_name)
  1439          _CLIENT.remove_bucket(bucket_name)
  1440  
  1441  
  1442  def test_presigned_get_object_response_headers(  # pylint: disable=invalid-name
  1443      log_entry,
  1444  ):
  1445      """Test presigned_get_object() with headers."""
  1446  
  1447      # Get a unique bucket_name and object_name
  1448      bucket_name = _gen_bucket_name()
  1449      object_name = f"{uuid4()}"
  1450      content_type = "text/plain"
  1451      content_language = "en_US"
  1452  
  1453      log_entry["args"] = {
  1454          "bucket_name": bucket_name,
  1455          "object_name": object_name,
  1456          "content_type": content_type,
  1457          "content_language": content_language,
  1458      }
  1459  
  1460      _CLIENT.make_bucket(bucket_name)
  1461      try:
  1462          size = 1 * KB
  1463          _CLIENT.put_object(bucket_name, object_name, LimitedRandomReader(size), size)
  1467  
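                # The response-content-type / response-content-language query
                # parameters ask the server to override those headers on the GET
                # response; the checks below verify the overrides are honored.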
  1468          response_headers = {
  1469              "response-content-type": content_type,
  1470              "response-content-language": content_language,
  1471          }
  1472          presigned_get_object_url = _CLIENT.presigned_get_object(
  1473              bucket_name, object_name, timedelta(seconds=120), response_headers
  1474          )
  1475  
  1476          log_entry["args"]["presigned_get_object_url"] = presigned_get_object_url
  1477  
  1478          response = HTTP.urlopen("GET", presigned_get_object_url)
  1479          returned_content_type = response.headers["Content-Type"]
  1480          returned_content_language = response.headers["Content-Language"]
  1481  
  1482          log_entry["args"]["response.status"] = response.status
  1483          log_entry["args"]["response.reason"] = response.reason
  1484          log_entry["args"]["response.headers"] = json.dumps(response.headers.__dict__)
  1485          # pylint: disable=protected-access
  1486          log_entry["args"]["response._body"] = response._body.decode("utf-8")
  1487          log_entry["args"]["returned_content_type"] = returned_content_type
  1488          log_entry["args"]["returned_content_language"] = returned_content_language
  1489  
  1490          if (
  1491              response.status != 200
  1492              or returned_content_type != content_type
  1493              or returned_content_language != content_language
  1494          ):
  1495              raise Exception(
  1496                  f"Presigned GET object URL {presigned_get_object_url} failed; "
  1497                  f"code: {response.code}, error: {response.data}"
  1498              )
  1499      finally:
  1500          _CLIENT.remove_object(bucket_name, object_name)
  1501          _CLIENT.remove_bucket(bucket_name)
  1502  
  1503  
  1504  def test_presigned_get_object_version(log_entry):  # pylint: disable=invalid-name
  1505      """Test presigned_get_object() of versioned object."""
  1506  
  1507      # Get a unique bucket_name and object_name
  1508      bucket_name = _gen_bucket_name()
  1509      object_name = f"{uuid4()}"
  1510  
  1511      log_entry["args"] = {
  1512          "bucket_name": bucket_name,
  1513          "object_name": object_name,
  1514      }
  1515  
  1516      _CLIENT.make_bucket(bucket_name)
  1517      version_id = None
  1518      try:
  1519          _CLIENT.set_bucket_versioning(bucket_name, VersioningConfig(ENABLED))
  1520          size = 1 * KB
  1521          result = _CLIENT.put_object(
  1522              bucket_name,
  1523              object_name,
  1524              LimitedRandomReader(size),
  1525              size,
  1526          )
  1527          version_id = result.version_id
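                # Passing version_id pins the presigned URL to this specific
                # object version (carried as a versionId query parameter).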
  1528          presigned_get_object_url = _CLIENT.presigned_get_object(
  1529              bucket_name,
  1530              object_name,
  1531              version_id=version_id,
  1532          )
  1533          response = HTTP.urlopen("GET", presigned_get_object_url)
  1534          if response.status != 200:
  1535              raise Exception(
  1536                  f"Presigned GET object URL {presigned_get_object_url} failed; "
  1537                  f"code: {response.code}, error: {response.data}"
  1538              )
  1539      finally:
  1540          _CLIENT.remove_object(bucket_name, object_name, version_id=version_id)
  1541          _CLIENT.remove_bucket(bucket_name)
  1542  
  1543  
  1544  def test_presigned_put_object_default_expiry(log_entry):  # pylint: disable=invalid-name
  1545      """Test presigned_put_object() with default expiry."""
  1546  
  1547      # Get a unique bucket_name and object_name
  1548      bucket_name = _gen_bucket_name()
  1549      object_name = f"{uuid4()}"
  1550  
  1551      log_entry["args"] = {
  1552          "bucket_name": bucket_name,
  1553          "object_name": object_name,
  1554      }
  1555  
  1556      _CLIENT.make_bucket(bucket_name)
  1557      try:
  1558          presigned_put_object_url = _CLIENT.presigned_put_object(
  1559              bucket_name, object_name
  1560          )
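                # Until it expires, the signed URL authorizes an unauthenticated
                # HTTP PUT of the object body; stat_object() below confirms the
                # upload actually landed.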
  1561          response = HTTP.urlopen(
  1562              "PUT", presigned_put_object_url, LimitedRandomReader(1 * KB)
  1563          )
  1564          if response.status != 200:
  1565              raise Exception(
  1566                  f"Presigned PUT object URL {presigned_put_object_url} failed; "
  1567                  f"code: {response.code}, error: {response.data}"
  1568              )
  1569          _CLIENT.stat_object(bucket_name, object_name)
  1570      finally:
  1571          _CLIENT.remove_object(bucket_name, object_name)
  1572          _CLIENT.remove_bucket(bucket_name)
  1573  
  1574  
  1575  def test_presigned_put_object_expiry(log_entry):  # pylint: disable=invalid-name
  1576      """Test presigned_put_object() with expiry."""
  1577  
  1578      # Get a unique bucket_name and object_name
  1579      bucket_name = _gen_bucket_name()
  1580      object_name = f"{uuid4()}"
  1581  
  1582      log_entry["args"] = {
  1583          "bucket_name": bucket_name,
  1584          "object_name": object_name,
  1585      }
  1586  
  1587      _CLIENT.make_bucket(bucket_name)
  1588      try:
  1589          presigned_put_object_url = _CLIENT.presigned_put_object(
  1590              bucket_name, object_name, timedelta(seconds=1)
  1591          )
  1592          # Wait for 2 seconds for the presigned url to expire
  1593          time.sleep(2)
  1594          response = HTTP.urlopen(
  1595              "PUT", presigned_put_object_url, LimitedRandomReader(1 * KB)
  1596          )
  1597          if response.status == 200:
  1598              raise ValueError("Presigned put url failed to expire!")
  1599      finally:
  1600          _CLIENT.remove_object(bucket_name, object_name)
  1601          _CLIENT.remove_bucket(bucket_name)
  1602  
  1603  
  1604  def test_presigned_post_policy(log_entry):
  1605      """Test presigned_post_policy()."""
  1606  
  1607      # Get a unique bucket_name and object_name
  1608      bucket_name = _gen_bucket_name()
  1609  
  1610      log_entry["args"] = {
  1611          "bucket_name": bucket_name,
  1612      }
  1613  
  1614      _CLIENT.make_bucket(bucket_name)
  1615      try:
  1616          no_of_days = 10
  1617          prefix = "objectPrefix/"
  1618  
  1619          policy = PostPolicy(
  1620              bucket_name,
  1621              datetime.utcnow() + timedelta(days=no_of_days),
  1622          )
  1623          policy.add_starts_with_condition("key", prefix)
  1624          policy.add_content_length_range_condition(64 * KB, 10 * MB)
  1625          policy.add_starts_with_condition("Content-Type", "image/")
  1626          log_entry["args"]["post_policy"] = {
  1627              "prefix": prefix,
  1628              "expires_in_days": no_of_days,
  1629              "content_length_range": "64KiB to 10MiB",
  1630              "Content-Type": "image/",
  1631          }
  1632          _CLIENT.presigned_post_policy(policy)
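                # presigned_post_policy() returns the signed form fields; this
                # test only checks that signing succeeds and discards them. A
                # rough usage sketch (illustrative names, not executed here):
                #   form_data = _CLIENT.presigned_post_policy(policy)
                #   form_data["key"] = prefix + "photo.png"      # must satisfy the key prefix condition
                #   form_data["Content-Type"] = "image/png"      # must satisfy the Content-Type condition
                #   # POST form_data plus a "file" part as multipart/form-data
                #   # to the bucket URL to upload without credentials.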
  1633      finally:
  1634          _CLIENT.remove_bucket(bucket_name)
  1635  
  1636  
  1637  def test_thread_safe(log_entry):
  1638      """Test thread safety."""
  1639  
  1640      # Compute the SHA-256 checksum of the user-provided large source file
  1641      # so that each downloaded copy can be verified against it.
  1642      test_file_sha_sum = _get_sha256sum(_LARGE_FILE)
  1643  
  1644      # Get a unique bucket_name and object_name
  1645      bucket_name = _gen_bucket_name()
  1646      object_name = f"{uuid4()}"
  1647  
  1648      log_entry["args"] = {
  1649          "bucket_name": bucket_name,
  1650          "object_name": object_name,
  1651      }
  1652  
  1653      # A list of exceptions raised by get_object_and_check
  1654      # called in multiple threads.
  1655      exceptions = []
  1656  
  1657      # get_object_and_check() downloads an object, stores it in a file
  1658      # and then calculates its checksum. In case of mismatch, a new
  1659      # exception is generated and saved in exceptions.
  1660      def get_object_and_check(index):
  1661          try:
  1662              local_file = f"copied_file_{index}"
  1663              _CLIENT.fget_object(bucket_name, object_name, local_file)
  1664              copied_file_sha_sum = _get_sha256sum(local_file)
  1665              # Compare sha-sum values of the source file and the copied one
  1666              if test_file_sha_sum != copied_file_sha_sum:
  1667                  raise ValueError(
  1668                      "Sha-sum mismatch on multi-threaded put and get objects"
  1669                  )
  1670          except Exception as exc:  # pylint: disable=broad-except
  1671              exceptions.append(exc)
  1672          finally:
  1673              # Remove downloaded file
  1674              _ = os.path.isfile(local_file) and os.remove(local_file)
  1675  
  1676      _CLIENT.make_bucket(bucket_name)
  1677      no_of_threads = 5
  1678      try:
  1679          # Upload the same object 'no_of_threads' times; each put runs in its own
  1680          # thread that is joined immediately, so the uploads execute sequentially.
  1681          for _ in range(no_of_threads):
  1682              thread = Thread(
  1683                  target=_CLIENT.fput_object, args=(bucket_name, object_name, _LARGE_FILE)
  1684              )
  1685              thread.start()
  1686              thread.join()
  1687  
  1688          # Get/Download 'no_of_threads' many objects
  1689          # simultaneously using multi-threading
  1690          thread_list = []
  1691          for i in range(no_of_threads):
  1692              # Start one downloader thread per index and keep a reference
  1693              # so that every thread can be joined below.
  1694              thread = Thread(target=get_object_and_check, args=(i,))
  1695              thread.start()
  1696              thread_list.append(thread)
  1697  
  1698          # Wait for all threads to finish
  1699          for thread in thread_list:
  1700              thread.join()
  1701  
  1702          if exceptions:
  1703              raise exceptions[0]
  1704      finally:
  1705          _CLIENT.remove_object(bucket_name, object_name)
  1706          _CLIENT.remove_bucket(bucket_name)
  1707  
  1708  
  1709  def test_get_bucket_policy(log_entry):
  1710      """Test get_bucket_policy()."""
  1711  
  1712      # Get a unique bucket_name
  1713      bucket_name = _gen_bucket_name()
  1714      log_entry["args"] = {
  1715          "bucket_name": bucket_name,
  1716      }
  1717      _CLIENT.make_bucket(bucket_name)
  1718      try:
  1719          _CLIENT.get_bucket_policy(bucket_name)
  1720      except S3Error as exc:
  1721          if exc.code != "NoSuchBucketPolicy":
  1722              raise
  1723      finally:
  1724          _CLIENT.remove_bucket(bucket_name)
  1725  
  1726  
  1727  def _get_policy_actions(stat):
  1728      """Get policy actions from stat information."""
  1729  
  1730      def listit(value):
  1731          return value if isinstance(value, list) else [value]
  1732  
  1733      actions = [listit(s.get("Action")) for s in stat if s.get("Action")]
  1734      actions = list(
  1735          set(item.replace("s3:", "") for sublist in actions for item in sublist)
  1736      )
  1737      actions.sort()
  1738      return actions
  1739  
  1740  
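        # Note: _validate_policy() compares only the normalized action sets (the
        # "s3:" prefix stripped, duplicates removed, sorted); resources and
        # principals in the statements are not compared.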
  1741  def _validate_policy(bucket_name, policy):
  1742      """Validate policy."""
  1743      policy_dict = json.loads(_CLIENT.get_bucket_policy(bucket_name))
  1744      actions = _get_policy_actions(policy_dict.get("Statement"))
  1745      expected_actions = _get_policy_actions(policy.get("Statement"))
  1746      return expected_actions == actions
  1747  
  1748  
  1749  def test_get_bucket_notification(log_entry):
  1750      """Test get_bucket_notification()."""
  1751  
  1752      # Get a unique bucket_name
  1753      bucket_name = _gen_bucket_name()
  1754      log_entry["args"] = {
  1755          "bucket_name": bucket_name,
  1756      }
  1757  
  1758      _CLIENT.make_bucket(bucket_name)
  1759      try:
  1760          config = _CLIENT.get_bucket_notification(bucket_name)
  1761          if (
  1762              config.cloud_func_config_list
  1763              or config.queue_config_list
  1764              or config.topic_config_list
  1765          ):
  1766              raise ValueError("Failed to receive an empty bucket notification")
  1767      finally:
  1768          _CLIENT.remove_bucket(bucket_name)
  1769  
  1770  
  1771  def test_set_bucket_policy_readonly(log_entry):
  1772      """Test set_bucket_policy() with readonly policy."""
  1773  
  1774      # Get a unique bucket_name
  1775      bucket_name = _gen_bucket_name()
  1776      log_entry["args"] = {
  1777          "bucket_name": bucket_name,
  1778      }
  1779  
  1780      _CLIENT.make_bucket(bucket_name)
  1781      try:
  1782          # read-only policy
  1783          policy = {
  1784              "Version": "2012-10-17",
  1785              "Statement": [
  1786                  {
  1787                      "Sid": "",
  1788                      "Effect": "Allow",
  1789                      "Principal": {"AWS": "*"},
  1790                      "Action": "s3:GetBucketLocation",
  1791                      "Resource": "arn:aws:s3:::" + bucket_name,
  1792                  },
  1793                  {
  1794                      "Sid": "",
  1795                      "Effect": "Allow",
  1796                      "Principal": {"AWS": "*"},
  1797                      "Action": "s3:ListBucket",
  1798                      "Resource": "arn:aws:s3:::" + bucket_name,
  1799                  },
  1800                  {
  1801                      "Sid": "",
  1802                      "Effect": "Allow",
  1803                      "Principal": {"AWS": "*"},
  1804                      "Action": "s3:GetObject",
  1805                      "Resource": f"arn:aws:s3:::{bucket_name}/*",
  1806                  },
  1807              ],
  1808          }
  1809          # Set read-only policy
  1810          _CLIENT.set_bucket_policy(bucket_name, json.dumps(policy))
  1811          # Validate if the policy is set correctly
  1812          if not _validate_policy(bucket_name, policy):
  1813              raise ValueError("Failed to set ReadOnly bucket policy")
  1814      finally:
  1815          _CLIENT.remove_bucket(bucket_name)
  1816  
  1817  
  1818  def test_set_bucket_policy_readwrite(log_entry):  # pylint: disable=invalid-name
  1819      """Test set_bucket_policy() with read/write policy."""
  1820  
  1821      # Get a unique bucket_name
  1822      bucket_name = _gen_bucket_name()
  1823      log_entry["args"] = {
  1824          "bucket_name": bucket_name,
  1825      }
  1826  
  1827      _CLIENT.make_bucket(bucket_name)
  1828      try:
  1829          # Read-write policy
  1830          policy = {
  1831              "Version": "2012-10-17",
  1832              "Statement": [
  1833                  {
  1834                      "Action": ["s3:GetBucketLocation"],
  1835                      "Sid": "",
  1836                      "Resource": ["arn:aws:s3:::" + bucket_name],
  1837                      "Effect": "Allow",
  1838                      "Principal": {"AWS": "*"},
  1839                  },
  1840                  {
  1841                      "Action": ["s3:ListBucket"],
  1842                      "Sid": "",
  1843                      "Resource": ["arn:aws:s3:::" + bucket_name],
  1844                      "Effect": "Allow",
  1845                      "Principal": {"AWS": "*"},
  1846                  },
  1847                  {
  1848                      "Action": ["s3:ListBucketMultipartUploads"],
  1849                      "Sid": "",
  1850                      "Resource": ["arn:aws:s3:::" + bucket_name],
  1851                      "Effect": "Allow",
  1852                      "Principal": {"AWS": "*"},
  1853                  },
  1854                  {
  1855                      "Action": [
  1856                          "s3:ListMultipartUploadParts",
  1857                          "s3:GetObject",
  1858                          "s3:AbortMultipartUpload",
  1859                          "s3:DeleteObject",
  1860                          "s3:PutObject",
  1861                      ],
  1862                      "Sid": "",
  1863                      "Resource": [f"arn:aws:s3:::{bucket_name}/*"],
  1864                      "Effect": "Allow",
  1865                      "Principal": {"AWS": "*"},
  1866                  },
  1867              ],
  1868          }
  1869          # Set read-write policy
  1870          _CLIENT.set_bucket_policy(bucket_name, json.dumps(policy))
  1871          # Validate if the policy is set correctly
  1872          if not _validate_policy(bucket_name, policy):
  1873              raise ValueError("Failed to set ReadWrite bucket policy")
  1874      finally:
  1875          _CLIENT.remove_bucket(bucket_name)
  1876  
  1877  
  1878  def _test_remove_objects(log_entry, version_check=False):
  1879      """Test remove_objects()."""
  1880  
  1881      # Get a unique bucket_name
  1882      bucket_name = _gen_bucket_name()
  1883      log_entry["args"] = {
  1884          "bucket_name": bucket_name,
  1885      }
  1886  
  1887      _CLIENT.make_bucket(bucket_name)
  1888      object_names = []
  1889      delete_object_list = []
  1890      try:
  1891          if version_check:
  1892              _CLIENT.set_bucket_versioning(
  1893                  bucket_name,
  1894                  VersioningConfig(ENABLED),
  1895              )
  1896          size = 1 * KB
  1897          # Upload some new objects to prepare for multi-object delete test.
  1898          for i in range(10):
  1899              object_name = f"prefix-{i}"
  1900              result = _CLIENT.put_object(
  1901                  bucket_name,
  1902                  object_name,
  1903                  LimitedRandomReader(size),
  1904                  size,
  1905              )
  1906              object_names.append(
  1907                  (object_name, result.version_id) if version_check else object_name,
  1908              )
  1909          log_entry["args"]["delete_object_list"] = object_names
  1910  
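                # DeleteObject accepts either a bare object name or a name plus
                # version_id; the tuple form is used when versioning was enabled
                # above.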
  1911          for args in object_names:
  1912              delete_object_list.append(
  1913                  DeleteObject(args)
  1914                  if isinstance(args, str)
  1915                  else DeleteObject(args[0], args[1])
  1916              )
  1917          # delete the objects in a single library call.
  1918          errs = _CLIENT.remove_objects(bucket_name, delete_object_list)
  1919          for err in errs:
  1920              raise ValueError(f"Remove objects err: {err}")
  1921      finally:
  1922          # Try to clean everything to keep our server intact
  1923          errs = _CLIENT.remove_objects(bucket_name, delete_object_list)
  1924          for err in errs:
  1925              raise ValueError(f"Remove objects err: {err}")
  1926          _CLIENT.remove_bucket(bucket_name)
  1927  
  1928  
  1929  def test_remove_objects(log_entry):
  1930      """Test remove_objects()."""
  1931      _test_remove_objects(log_entry)
  1932  
  1933  
  1934  def test_remove_object_versions(log_entry):
  1935      """Test remove_objects()."""
  1936      _test_remove_objects(log_entry, version_check=True)
  1937  
  1938  
  1939  def test_remove_bucket(log_entry):
  1940      """Test remove_bucket()."""
  1941  
  1942      # Get a unique bucket_name
  1943      bucket_name = _gen_bucket_name()
  1944      if _IS_AWS:
  1945          bucket_name += ".unique"
  1946  
  1947      log_entry["args"] = {
  1948          "bucket_name": bucket_name,
  1949      }
  1950  
  1951      if _IS_AWS:
  1952          log_entry["args"]["location"] = location = AWS_REGION
  1953          _CLIENT.make_bucket(bucket_name, location)
  1954      else:
  1955          _CLIENT.make_bucket(bucket_name)
  1956  
  1957      # Removing bucket. This operation will only work if your bucket is empty.
  1958      _CLIENT.remove_bucket(bucket_name)
  1959  
  1960  
  1961  def main():
  1962      """
  1963      Functional testing of minio python library.
  1964      """
  1965      # pylint: disable=global-statement
  1966      global _CLIENT, _TEST_FILE, _LARGE_FILE, _IS_AWS
  1967  
  1968      access_key = os.getenv("ACCESS_KEY")
  1969      secret_key = os.getenv("SECRET_KEY")
  1970      server_endpoint = os.getenv("SERVER_ENDPOINT", "play.min.io")
  1971      secure = os.getenv("ENABLE_HTTPS", "1") == "1"
  1972  
  1973      if server_endpoint == "play.min.io":
  1974          access_key = "Q3AM3UQ867SPQQA43P2F"
  1975          secret_key = "zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG"
  1976          secure = True
  1977  
  1978      _CLIENT = Minio(server_endpoint, access_key, secret_key, secure=secure)
  1979      _IS_AWS = ".amazonaws.com" in server_endpoint
  1980  
  1981      # Check if we are running in the mint environment.
  1982      data_dir = os.getenv("DATA_DIR", "/mint/data")
  1983  
  1984      is_mint_env = (
  1985          os.path.exists(data_dir)
  1986          and os.path.exists(os.path.join(data_dir, "datafile-1-MB"))
  1987          and os.path.exists(os.path.join(data_dir, "datafile-11-MB"))
  1988      )
  1989  
  1990      # Enable trace
  1991      # _CLIENT.trace_on(sys.stderr)
  1992  
  1993      _TEST_FILE = "datafile-1-MB"
  1994      _LARGE_FILE = "datafile-11-MB"
  1995      if is_mint_env:
  1996          # Choose data files
  1997          _TEST_FILE = os.path.join(data_dir, "datafile-1-MB")
  1998          _LARGE_FILE = os.path.join(data_dir, "datafile-11-MB")
  1999      else:
  2000          with open(_TEST_FILE, "wb") as file_data:
  2001              shutil.copyfileobj(LimitedRandomReader(1 * MB), file_data)
  2002          with open(_LARGE_FILE, "wb") as file_data:
  2003              shutil.copyfileobj(LimitedRandomReader(11 * MB), file_data)
  2004  
  2005      ssec = None
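            # SSE-C keys may only be sent over TLS, so the customer key is
            # created only for secure (HTTPS) connections.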
  2006      if secure:
  2007          # Create a Customer Key of 32 Bytes for Server Side Encryption (SSE-C)
  2008          cust_key = b"AABBCCDDAABBCCDDAABBCCDDAABBCCDD"
  2009          # Create an SSE-C object with provided customer key
  2010          ssec = SseCustomerKey(cust_key)
  2011  
  2012      if os.getenv("MINT_MODE") == "full":
  2013          tests = {
  2014              test_make_bucket_default_region: None,
  2015              test_make_bucket_with_region: None,
  2016              test_negative_make_bucket_invalid_name: None,
  2017              test_list_buckets: None,
  2018              test_fput_object_small_file: {"sse": ssec} if ssec else None,
  2019              test_fput_object_large_file: {"sse": ssec} if ssec else None,
  2020              test_fput_object_with_content_type: None,
  2021              test_copy_object_no_copy_condition: (
  2022                  {"ssec_copy": ssec, "ssec": ssec} if ssec else None
  2023              ),
  2024              test_copy_object_etag_match: None,
  2025              test_copy_object_with_metadata: None,
  2026              test_copy_object_negative_etag_match: None,
  2027              test_copy_object_modified_since: None,
  2028              test_copy_object_unmodified_since: None,
  2029              test_put_object: {"sse": ssec} if ssec else None,
  2030              test_negative_put_object_with_path_segment: None,
  2031              test_stat_object: {"sse": ssec} if ssec else None,
  2032              test_stat_object_version: {"sse": ssec} if ssec else None,
  2033              test_get_object: {"sse": ssec} if ssec else None,
  2034              test_get_object_version: {"sse": ssec} if ssec else None,
  2035              test_fget_object: {"sse": ssec} if ssec else None,
  2036              test_fget_object_version: {"sse": ssec} if ssec else None,
  2037              test_get_object_with_default_length: None,
  2038              test_get_partial_object: {"sse": ssec} if ssec else None,
  2039              test_list_objects_v1: None,
  2040              test_list_object_v1_versions: None,
  2041              test_list_objects_with_prefix: None,
  2042              test_list_objects_with_1001_files: None,
  2043              test_list_objects: None,
  2044              test_list_object_versions: None,
  2045              test_presigned_get_object_default_expiry: None,
  2046              test_presigned_get_object_expiry: None,
  2047              test_presigned_get_object_response_headers: None,
  2048              test_presigned_get_object_version: None,
  2049              test_presigned_put_object_default_expiry: None,
  2050              test_presigned_put_object_expiry: None,
  2051              test_presigned_post_policy: None,
  2052              test_thread_safe: None,
  2053              test_get_bucket_policy: None,
  2054              test_set_bucket_policy_readonly: None,
  2055              test_set_bucket_policy_readwrite: None,
  2056              test_get_bucket_notification: None,
  2057              test_select_object_content: None,
  2058          }
  2059      else:
  2060          tests = {
  2061              test_make_bucket_default_region: None,
  2062              test_list_buckets: None,
  2063              test_put_object: {"sse": ssec} if ssec else None,
  2064              test_stat_object: {"sse": ssec} if ssec else None,
  2065              test_stat_object_version: {"sse": ssec} if ssec else None,
  2066              test_get_object: {"sse": ssec} if ssec else None,
  2067              test_get_object_version: {"sse": ssec} if ssec else None,
  2068              test_list_objects: None,
  2069              test_presigned_get_object_default_expiry: None,
  2070              test_presigned_put_object_default_expiry: None,
  2071              test_presigned_post_policy: None,
  2072              test_copy_object_no_copy_condition: (
  2073                  {"ssec_copy": ssec, "ssec": ssec} if ssec else None
  2074              ),
  2075              test_select_object_content: None,
  2076              test_get_bucket_policy: None,
  2077              test_set_bucket_policy_readonly: None,
  2078              test_get_bucket_notification: None,
  2079          }
  2080  
  2081      tests.update(
  2082          {
  2083              test_remove_object: None,
  2084              test_remove_object_version: None,
  2085              test_remove_objects: None,
  2086              test_remove_object_versions: None,
  2087              test_remove_bucket: None,
  2088          },
  2089      )
  2090  
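            # Run every test once without keyword arguments; tests mapped to an
            # SSE-C kwargs dict are run a second time with those kwargs.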
  2091      for test_name, arg_list in tests.items():
  2092          args = ()
  2093          kwargs = {}
  2094          _call_test(test_name, *args, **kwargs)
  2095  
  2096          if arg_list:
  2097              args = ()
  2098              kwargs = arg_list
  2099              _call_test(test_name, *args, **kwargs)  # pylint: disable=not-a-mapping
  2100  
  2101      # Remove temporary files.
  2102      if not is_mint_env:
  2103          os.remove(_TEST_FILE)
  2104          os.remove(_LARGE_FILE)
  2105  
  2106  
  2107  if __name__ == "__main__":
  2108      try:
  2109          main()
  2110      except TestFailed:
  2111          sys.exit(1)
  2112      except Exception as excp:  # pylint: disable=broad-except
  2113          print(excp)
  2114          sys.exit(-1)