github.com/uber/kraken@v0.1.4/test/python/test_core.py

# Copyright (c) 2016-2019 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
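"""Integration tests for Kraken's core blob flow: uploads through the origin
cluster or the remote backend (testfs), downloads through agents, and
resilience to data loss and component outages."""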
from __future__ import absolute_import

import hashlib
import os
import time
from threading import Thread

import pytest
import requests

from utils import concurrently_apply
from utils import tls_opts


def test_origin_upload_no_client_cert(origin_cluster):
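    """Origin blob uploads require a client TLS certificate; a request made
    without one should be rejected with 403."""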
    name, blob = _generate_blob()
    addr = origin_cluster.get_location(name)
    url = 'https://{addr}/namespace/testfs/blobs/sha256:{name}/uploads'.format(
            addr=addr, name=name)
    res = requests.post(url, **tls_opts())
    assert res.status_code == 403


def test_concurrent_agent_downloads(origin_cluster, agent_factory):
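    """Several agents downloading the same blob concurrently should all end
    up with identical content."""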
    name, blob = _generate_blob()

    origin_cluster.upload(name, blob)

    # TODO(codyg): This test struggles with more than 4 agents when we limit
    # the max origin connections to 1. I suspect this is because the agents
    # form isolated networks.
    with agent_factory.create(4) as agents:
        concurrently_apply(lambda agent: agent.download(name, blob), agents)


def test_blob_distribution_resilient_to_remote_backend_unavailability(testfs, origin_cluster, agent):
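    """A blob already uploaded to the origin cluster should still reach
    agents while the remote backend (testfs) is down."""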
    testfs.stop()

    name, blob = _generate_blob()

    origin_cluster.upload(name, blob)

    agent.download(name, blob)


def test_agent_download_after_remote_backend_upload(testfs, agent):
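    """A blob that exists only in the remote backend should be fetched on
    demand when an agent downloads it."""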
    name, blob = _generate_blob()

    testfs.upload(name, blob)

    agent.download(name, blob)


def test_agent_download_after_origin_data_loss_after_origin_upload(origin_cluster, agent):
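    """After every origin loses its disk, an agent download should still
    succeed, presumably by restoring the blob from the remote backend."""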
    name, blob = _generate_blob()

    origin_cluster.upload(name, blob)

    # Wipe out all data in the origin cluster.
    for origin in origin_cluster:
        origin.restart(wipe_disk=True)

    agent.download(name, blob)


def test_agent_download_returns_500_when_remote_backend_unavailable(testfs, agent):
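    """With the remote backend down and the blob cached nowhere, an agent
    download should fail with a 500."""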
    name, _ = _generate_blob()

    testfs.stop()

    with pytest.raises(requests.HTTPError) as exc_info:
        agent.download(name, None)

    assert exc_info.value.response.status_code == 500


def test_agent_download_404(agent):
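    """Downloading a blob that was never uploaded anywhere should return 404."""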
    name, _ = _generate_blob()

    with pytest.raises(requests.HTTPError) as exc_info:
        agent.download(name, None)

    assert exc_info.value.response.status_code == 404


def test_agent_download_resilient_to_invalid_tracker_cache(origin_cluster, agent):
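    """All blob data is wiped while the tracker keeps its metainfo cache; a
    second agent download should still succeed."""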
    name, blob = _generate_blob()

    origin_cluster.upload(name, blob)

    agent.download(name, blob)

    # Wipe out all data in the agent and origins, but leave the metainfo
    # cached in the tracker.

    agent.restart(wipe_disk=True)

    for origin in origin_cluster:
        origin.restart(wipe_disk=True)

    # The origins should still restore the blob even though the cached
    # metainfo means they never receive a metainfo request for it.
    agent.download(name, blob)


def test_agent_download_resilient_to_offline_origin(origin_cluster, agent):
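    """The blob should remain downloadable after one of the origins goes
    down."""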
    name, blob = _generate_blob()

    origin_cluster.upload(name, blob)

    # With max_replica=2, stopping one origin still leaves one replica to
    # serve the blob.
    list(origin_cluster)[1].stop()

    agent.download(name, blob)


@pytest.mark.xfail
def test_agent_download_resilient_to_initial_offline_origin(origin_cluster, agent):
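    """Marked xfail: downloads that start while every origin is offline are
    not yet expected to recover once the origins come back."""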
    name, blob = _generate_blob()

    origin_cluster.upload(name, blob)

    for origin in origin_cluster:
        origin.stop()

    result = {'error': None}

    def download():
        try:
            agent.download(name, blob)
        except Exception as e:
            result['error'] = e

    # The agent initially has no one to download from since all origins are
    # offline.
    t = Thread(target=download)
    t.start()

    time.sleep(2)

    for origin in origin_cluster:
        origin.start()

    t.join()

    assert result['error'] is None


def _generate_blob():
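    """Return (sha256 hex digest, contents) of a random 5 MiB blob; the
    digest doubles as the blob name throughout these tests."""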
    blob = os.urandom(5 * (1 << 20))  # 5 MiB of random data.
    h = hashlib.sha256()
    h.update(blob)
    return h.hexdigest(), blob