github.com/rita33cool1/iot-system-gateway@v0.0.0-20200911033302-e65bde238cc5/gateway/Implementation/Algo/Download/lagrange_download_algo.py

#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
python download_algo.py {registry_size} {bandwidth} {analytics} {method}
{registry_size}: 6 (default 6 GB)
{bandwidth}: 176000 (default 176 Mbps)
{analytics}: 'analytics.input' (default)
'analytics.input':
audio1-18
audio4-1
audio4-13
...
{method}: DP (Dynamic Programming), Lagrange (Lagrangian)

images_dict structure
{
  "ImageName":
  {
      "LayerID": is_exist (int type)
      "LayerID": is_exist (int type)
          ...
  }
  "ImageName":
  ...
}
"""
__author__ = 'YuJung Wang'
__date__ = '2020/04'

import os
import sys
import time
import math
import json
# list.copy is only supported since Python 3.3, so use the copy module instead
import copy
import docker
import subprocess
# To get the same items in a list
import collections
# write log
from datetime import datetime
from itertools import takewhile
import paho.mqtt.publish as publish


overall_images = {'yolo1':{}, 'yolo2':{}, 'yolo3':{}, 'yolo4':{}, 'yolo5':{},
          'audio1':{}, 'audio2':{}, 'audio3':{}, 'audio4':{}, 'audio5':{}}

const_overall_containers = {
    "yolo1":{"CPU":55.6,"RAM":13.08,"SIZE":1.81,"BW":150, "COMLAYER":0, "LAYER":0},
    "yolo2":{"CPU":55.6,"RAM":13.08,"SIZE":2.25,"BW":150, "COMLAYER":0, "LAYER":0},
    "yolo3":{"CPU":55.6,"RAM":13.08,"SIZE":2.61,"BW":150, "COMLAYER":0, "LAYER":0},
    "yolo4":{"CPU":55.6,"RAM":13.08,"SIZE":2.96,"BW":150, "COMLAYER":0, "LAYER":0},
    "yolo5":{"CPU":55.6,"RAM":13.08,"SIZE":3.6,"BW":150, "COMLAYER":0, "LAYER":0},
    "audio1":{"CPU":51.3,"RAM":7.84,"SIZE":2.03,"BW":3260, "COMLAYER":0, "LAYER":0},
    "audio2":{"CPU":51.3,"RAM":7.84,"SIZE":2.49,"BW":3260, "COMLAYER":0, "LAYER":0},
    "audio3":{"CPU":51.3,"RAM":7.84,"SIZE":2.85,"BW":3260, "COMLAYER":0, "LAYER":0},
    "audio4":{"CPU":51.3,"RAM":7.84,"SIZE":3.13,"BW":3260, "COMLAYER":0, "LAYER":0},
    "audio5":{"CPU":51.3,"RAM":7.84,"SIZE":3.48,"BW":3260, "COMLAYER":0, "LAYER":0}
}

overall_containers = copy.deepcopy(const_overall_containers)
overall_com_layers = {}
overall_layers = {}
#overall_repo = 'yujungwang/iscc19'
overall_repo = os.environ['DOCKER_PROVIDER'] + '/' + os.environ['DOCKER_REPO']

def read_image_json(imgs_list, is_exist):
    images_dict = {}
    for img in imgs_list:
        images_dict[img] = {}

    com_layers = {}
    layers = {}
    for img in images_dict:
        img_name = 's2-' + img[:-1] + '-' + img[-1]
        #print('img_name: ' + img_name)
        with open('/home/minion/YC/iscc19/Implementation/Algo/Download/image_'+img_name+'.json', 'r') as reader:
            jf = json.loads(reader.read())
            #images_dict[img]['Layers'] = jf['Layers']
            for l in jf['Layers']:
                if l['LayerID'] not in com_layers:
                    # Convert the unit to bytes
                    com_size, unit = l['CompressLayerSize'].split(' ', 1)
                    if unit == 'GB':
                        com_layers[l['LayerID']] = float(com_size)*1000*1000*1000
                    elif unit == 'MB':
                        com_layers[l['LayerID']] = float(com_size)*1000*1000
                    elif unit == 'KB':
                        com_layers[l['LayerID']] = float(com_size)*1000
                    else: # B
                        com_layers[l['LayerID']] = float(com_size)

                images_dict[img][l['LayerID']] = is_exist
                # Bytes
                layers[l['LayerID']] = float(l['LayerSize'])
    return images_dict, com_layers, layers
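
# Illustrative sketch only (not called by this module): the GB/MB/KB/B branch
# above also appears in get_unreplace_layers(), and could be consolidated into
# a helper like the hypothetical size_to_bytes() below. It assumes the same
# decimal (SI) multipliers the code above uses.
def size_to_bytes(size_str):
    """e.g. size_to_bytes('2.03 GB') -> 2030000000.0"""
    value, unit = size_str.split(' ', 1)
    multipliers = {'GB': 1000**3, 'MB': 1000**2, 'KB': 1000, 'B': 1}
    return float(value) * multipliers[unit]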


def get_exist_images():
    client = docker.from_env()
    exist_images = client.images.list()
    images_list = []
    for image in exist_images:
        # str(image) looks like "<Image: 'repo:tag'>"; strip the decoration
        # and split out the repository and the tag
        repo = str(image).split(':')[1].split(':')[0].replace("'","").replace(" ","")
        tag = str(image).split(':')[2].replace("'","").replace(" ","").replace(">","")
        if overall_repo == repo:
            # tag has the form 's2-<app>-<version>', e.g. 's2-audio-1' -> 'audio1'
            name = tag[3:-2] + tag[-1]
            images_list.append(name)
    return images_list


def get_running_images(analytics):
    images = []
    client = docker.from_env()
    for container in analytics:
        a = client.containers.get(container)
        # str(a.image) looks like "<Image: 'repo:s2-<app>-<version>'>"
        newstr = str(a.image).replace("<", "").replace(">","").replace("'","")
        #images.append(newstr.split(' ')[1])
        repo, app, vers = newstr.split(' ')[1].split('-', 2)
        images.append(app+vers)
    return images


def get_containers():
    analytics = []
    cmd = 'docker ps| grep k8s'
    k8s_containers_info = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT).decode('utf-8')
    lines = k8s_containers_info.split('\n')
    for line in lines[:-1]:
        infos = line.split()
        if 'k8s_audio-recognition' in infos[-1] or 'k8s_object-detection' in infos[-1]:
            analytics.append(infos[0])
    return analytics
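
# Illustrative sketch only (not called by this module): the same container IDs
# could be collected through the docker SDK already imported above, instead of
# shelling out to 'docker ps | grep k8s'. The name prefixes are the ones
# get_containers() greps for.
def get_containers_sdk():
    client = docker.from_env()
    analytics = []
    for container in client.containers.list():
        if container.name.startswith(('k8s_audio-recognition', 'k8s_object-detection')):
            analytics.append(container.short_id)
    return analytics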


def get_unreplace_layers(exist_images_list):
    unreplaced_images = []
    replaced_layer_nums = []
    with open('/home/minion/YC/iscc19/Implementation/Algo/Replacement/images_replace.log', 'r') as reader:
        for line in reader.readlines():
            # If there are no replaced images, then break
            if line.strip() == '': break
            rep_img, num_layers = line.rstrip().split(',')
            rep_img = rep_img.split(':')[1]
            unreplaced_images.append(rep_img[3:-2]+rep_img[-1])
            replaced_layer_nums.append(int(num_layers))
    # Remove images that already exist (iterate over a copy while removing)
    for img in list(unreplaced_images):
        if img in exist_images_list:
            del replaced_layer_nums[unreplaced_images.index(img)]
            unreplaced_images.remove(img)
    # Read the image information json file to get the layers' information
    images_dict = {}
    for img in unreplaced_images:
        images_dict[img] = {}
    com_layers = {}
    layers = {}
    for img in images_dict:
        img_name = 's2-' + img[:-1] + '-' + img[-1]
        parts = []
        with open('/home/minion/YC/iscc19/Implementation/Algo/Download/image_'+img_name+'.json', 'r') as reader:
            reads = reader.read().replace('\n', '').replace(' ', '').split('[{', 1)[1]
            #print('reads: ' + reads)
            parts = reads.split('},{')
            #parts = reader.read().replace('\n', '').replace(' ', '').split('[{', 1)[1].split('},{')
        # index of the last layer that still exists
        max_l_num = len(parts)-replaced_layer_nums[unreplaced_images.index(img)]-1
        for i in range(0, len(parts)):
            com_size_str = parts[i].split('"CompressLayerSize":"', 1)[1].split('"', 1)[0]
            size = parts[i].split('"LayerSize":"', 1)[1].split('"', 1)[0]
            l_id = parts[i].split('"LayerID":"', 1)[1].split('"')[0]
            if l_id not in com_layers:
                # Convert the unit to bytes
                #com_size, unit = com_size_str.split(' ', 1)
                unit = com_size_str[-2:]
                com_size = com_size_str[:-2]
                if unit == 'GB':
                    com_layers[l_id] = float(com_size)*1000*1000*1000
                elif unit == 'MB':
                    com_layers[l_id] = float(com_size)*1000*1000
                elif unit == 'KB':
                    com_layers[l_id] = float(com_size)*1000
                else: # B (a one-character unit, so only strip the last character)
                    com_layers[l_id] = float(com_size_str[:-1])
            # layers up to max_l_num already exist (1)
            images_dict[img][l_id] = 1 if i <= max_l_num else 0
            # Bytes
            layers[l_id] = float(size)
    # Write back the replacement information
    # The new information differs only if the unreplaced images exist now
    with open('/home/minion/YC/iscc19/Implementation/Algo/Replacement/images_replace.log', 'w') as writer:
        for image in unreplaced_images:
            writer.write(overall_repo+':s2-'+image[:-1]+'-'+image[-1]+','+str(replaced_layer_nums[unreplaced_images.index(image)])+'\n')

    # Debug Message
    #print('Get unreplaced layers information')
    #print('images_dict:', images_dict)
    #print('com_layers:', com_layers)
    #print('layers:', layers)
    #print('keys:', images_dict.keys())
    return images_dict, com_layers, layers
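
# Illustrative sketch only (not called by this module): get_unreplace_layers()
# parses the image_*.json file by raw string splitting. Assuming the file is
# the same JSON format that read_image_json() loads (a top-level 'Layers' list
# of objects with 'LayerID', 'LayerSize' and 'CompressLayerSize'), json.loads
# could recover the same fields more robustly.
def read_layer_fields(img_name):
    path = '/home/minion/YC/iscc19/Implementation/Algo/Download/image_'+img_name+'.json'
    with open(path, 'r') as reader:
        jf = json.loads(reader.read())
    # One (layer_id, layer_size_bytes, compressed_size_str) tuple per layer, in order
    return [(l['LayerID'], float(l['LayerSize']), l['CompressLayerSize'])
            for l in jf['Layers']]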


def get_layer_size(container, layers):
    layer_size = 0
    com_layer_size = 0
    for l in layers:
        # Only count layers that do not exist yet (is_exist == 0)
        if not layers[l]:
            layer_size += overall_layers[l]
            com_layer_size += overall_com_layers[l]
    container['LAYER'] = layer_size
    container['COMLAYER'] = com_layer_size


def dynamic_program(input_analytics, n, total_size, bandwidth, CPU):
    # 0/1 knapsack with three constraints: image size (storage), compressed
    # layer size (download bandwidth) and CPU; the value is the saved bandwidth
    size = overall_containers[input_analytics[n]]['LAYER']
    bw = overall_containers[input_analytics[n]]['COMLAYER']
    cpu = overall_containers[input_analytics[n]]['CPU']
    value = overall_containers[input_analytics[n]]['BW']

    #print('n: ', n)
    #print('total_size: ', total_size)
    #print('size: ', size)

    if total_size <= 0 or bandwidth <= 0 or CPU <= 0 or n < 0:
        #print('constraints 0 or n < 0')
        #print([])
        return 0, []

    #print('n-1: ', n-1)
    if (size > total_size) or (bw > bandwidth) or (cpu > CPU):
        # This item alone exceeds some constraint, so skip it
        total_value, analytics = dynamic_program(input_analytics, n-1, total_size, bandwidth, CPU)
        #print('total_value', total_value)
        #print('analytics: ', analytics)
        return total_value, analytics
    else:
        # The item fits: take the better of excluding and including it
        not_includ_value, not_includ_analyts = dynamic_program(input_analytics, n-1, total_size, bandwidth, CPU)
        includ_value, includ_analyts = dynamic_program(input_analytics, n-1, total_size-size, bandwidth-bw, CPU-cpu)
        #print('not_includ_analyts: ', not_includ_analyts)
        #print('includ_analyts: ', includ_analyts)

        if not_includ_value >= includ_value+value:
            #print('not_includ_value bigger')
            #print('analytics: ', not_includ_analyts)
            return not_includ_value, not_includ_analyts
        else:
            #print('includ_value bigger')
            includ_analyts.append(input_analytics[n])
            #print('analytics: ', includ_analyts)
            return includ_value + value, includ_analyts
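
# Illustrative sketch only (not called by this module): the plain recursion
# above re-solves overlapping subproblems, so its running time grows
# exponentially with the number of analytics. A memoized variant of the same
# three-constraint 0/1 knapsack is shown below; 'items' is a hypothetical list
# of (size, bw, cpu, value) tuples built from overall_containers.
def knapsack_memo(items, n, total_size, bandwidth, CPU, memo=None):
    if memo is None:
        memo = {}
    if total_size <= 0 or bandwidth <= 0 or CPU <= 0 or n < 0:
        return 0, []
    key = (n, total_size, bandwidth, CPU)
    if key in memo:
        return memo[key]
    size, bw, cpu, value = items[n]
    if size > total_size or bw > bandwidth or cpu > CPU:
        result = knapsack_memo(items, n-1, total_size, bandwidth, CPU, memo)
    else:
        skip_value, skip_picks = knapsack_memo(items, n-1, total_size, bandwidth, CPU, memo)
        take_value, take_picks = knapsack_memo(items, n-1, total_size-size, bandwidth-bw, CPU-cpu, memo)
        if skip_value >= take_value + value:
            result = skip_value, skip_picks
        else:
            result = take_value + value, take_picks + [n]
    memo[key] = result
    return result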


def subproblem(container, lambda_s, lambda_b, lambda_c):
    # Decomposed per-analytic subproblem of the Lagrangian relaxation:
    # include the analytic (e_a = 1) iff its relaxed objective is positive
    object_function = container['BW']*1000
    sum_constraints = container['LAYER'] + container['COMLAYER'] + container['CPU']
    relaxed_constraints = lambda_s*container['LAYER'] + lambda_b*container['COMLAYER'] + lambda_c*container['CPU']

    if object_function - relaxed_constraints > 0:
        return 1, relaxed_constraints, object_function-relaxed_constraints
    else: return 0, 0, 0


def lagrange(input_analytics, input_images_dict, total_size, bandwidth, CPU):
    # Initialize parameters
    t = 0
    lambda_s = 0.1
    lambda_b = 0.1
    lambda_c = 0.1
    # step size
    #alpha = 0.0000000000001
    alpha = 0.0000000000001
    # relaxed constraints
    sum_relaxation = 0
    # stopping criterion
    stop_range = 100000
    # total value
    total_value = stop_range + 1
    min_value = 0
    min_local_e_a = []
    min_t = 0

    ## ----- Compute Dual Decomposition ----- ##
    old_lambda_s = 0
    old_lambda_b = 0
    old_lambda_c = 0
    old_value = total_value
    first_t = True
    while (old_value - total_value) >= 0 and (lambda_s > 0 or lambda_b > 0 or lambda_c > 0):
        # relaxed constraints
        sum_constraints = 0
        sum_value = 0
        # object variables
        local_e_a = []
        # Compute the decomposed subproblems
        for analytic in input_analytics:
            e_a, sum_const, value = subproblem(overall_containers[analytic], lambda_s, lambda_b, lambda_c)
            local_e_a.append(e_a)
            sum_constraints += sum_const
            sum_value += value
            #print('sum_const', sum_const)
        # Update the lambdas by a projected subgradient step; note that the
        # same lambda-weighted sum is used as the subgradient for all three
        # multipliers
        old_lambda_s = lambda_s
        old_lambda_b = lambda_b
        old_lambda_c = lambda_c
        lambda_s = lambda_s - alpha * (total_size-sum_constraints)
        if lambda_s < 0: lambda_s = 0
        lambda_b = lambda_b - alpha * (bandwidth-sum_constraints)
        if lambda_b < 0: lambda_b = 0
        lambda_c = lambda_c - alpha * (CPU-sum_constraints)
        if lambda_c < 0: lambda_c = 0
        #print('sum_constraints', sum_constraints)
        #print(old_lambda_s)
        #print(old_lambda_b)
        #print(old_lambda_c)
        # Compute the total (dual) value
        old_value = total_value
        total_value = sum_value + old_lambda_s*total_size + old_lambda_b*bandwidth + old_lambda_c*CPU
        if first_t:
            old_value = total_value
            first_t = False
        #print('iteration: ', t)
        #print('sum_value: ', sum_value)
        #print('total_value: ', total_value)
        if t == 0:
            min_value = total_value
            min_local_e_a = local_e_a
            min_t = t
        elif total_value < min_value:
            min_value = total_value
            min_local_e_a = local_e_a
            min_t = t
        t += 1

    # Debug Message
    #print('min_t: ' + str(min_t))
    #print('min_value: ' + str(min_value))

    return min_local_e_a
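
# Illustrative sketch only (not called by this module): one projected
# subgradient step on a single multiplier, as performed inside the loop above.
# When the relaxed usage exceeds the capacity the multiplier grows (the
# resource becomes more expensive); otherwise it shrinks and is clipped at 0.
def subgradient_step(lam, alpha, capacity, usage):
    """e.g. subgradient_step(0.1, 1e-13, 6e9, 8e9) -> 0.1002"""
    lam = lam - alpha * (capacity - usage)
    return max(lam, 0)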


def download_algo(total_size, bandwidth, CPU, original_analytics, method):
    ## ----- Initialization ----- ##
    # storage GB -> B
    #print('Before total_size: ' + str(total_size))
    total_size *= 1000*1000*1000
    #print('total_size: ' + str(total_size))
    # bandwidth (Kbps) -> bps, then multiply by the 10-minute time step
    #print('bandwidth: ' + str(bandwidth))
    bandwidth *= 1000*10*60
    #print('bandwidth: ' + str(bandwidth))
    # Calculate the remaining CPU resource
    # Get the running images
    running_images = get_running_images(get_containers())
    for img in running_images:
        CPU -= const_overall_containers[img]['CPU']

    global overall_containers
    #overall_containers = copy.deepcopy(const_overall_containers)
    # Get the existing images
    exist_images_list = get_exist_images()
    #print(exist_images_list)
    exist_images_dict, exist_com_layers_dict, exist_layers_dict = read_image_json(exist_images_list, 1)
    # Input images; layers are assumed to exist by default
    input_images_dict, input_com_layers_dict, input_layers_dict = read_image_json(original_analytics, 1)
    global overall_layers
    overall_layers = input_layers_dict
    global overall_com_layers
    overall_com_layers = input_com_layers_dict
    # Get the unreplaced layers
    unreplace_images_dict, unreplace_com_layers_dict, unreplace_layers_dict = get_unreplace_layers(exist_images_list)
    # Check whether the layers of the input images exist or not
    for img in input_images_dict:
        for lay in input_images_dict[img]:
            try:
                overall_com_layers[lay] = exist_com_layers_dict[lay]
            except KeyError:
                input_images_dict[img][lay] = 0
                if img in unreplace_images_dict.keys():
                    input_images_dict[img][lay] = unreplace_images_dict[img][lay]
    # Get the size of the layers which need to be downloaded
    for analytic in original_analytics:
        get_layer_size(overall_containers[analytic], input_images_dict[analytic])
    # Calculate the available storage size
    # First, find all existing layers from the existing images and the unreplaced layers
    overall_using_layers = copy.deepcopy(exist_layers_dict)
    for img in unreplace_images_dict:
        #print('unreplace image:', img)
        for lay in unreplace_images_dict[img]:
            #print('unreplace layer:', lay)
            #print('overall_using_layers:', overall_using_layers)
            #print('unreplace_images_dict[img][lay]:', unreplace_images_dict[img][lay])
            if lay not in overall_using_layers.keys() and unreplace_images_dict[img][lay]:
                #print('using unreplaced layer:', lay)
                overall_using_layers[lay] = unreplace_layers_dict[lay]
    # Sum the sizes of all the existing layers
    sum_existed_layers_size = 0
    for lay in overall_using_layers:
        sum_existed_layers_size += overall_using_layers[lay]
    #print('sum_existed_layers_size: ' + str(sum_existed_layers_size))
    # Total available size = total size - size of the existing layers
    total_size -= sum_existed_layers_size
    #print('After total_size: ' + str(total_size))


    ### ----- Lagrange Need ----- ###
    if method == 'Lagrange':
        # Scale CPU so that its magnitude is similar to the bandwidth and the size
        for container in overall_containers:
            overall_containers[container]['CPU'] *= 1000*1000*10
        #print('CPU: ' + str(CPU))
        CPU *= 1000*1000*10
        #print('CPU: ' + str(CPU))

    # Remove duplicate items and multiply the bandwidth
    input_analytics = []
    duplic_analytics = []
    duplicate_num = []
    for analytic in original_analytics:
        ### ----- Lagrange Need ----- ###
        if method == 'Lagrange':
            add_cpu = overall_containers[analytic]['CPU'] + const_overall_containers[analytic]['CPU']*1000*1000*10
        ### ----- DP Need ----- ###
        else:
            add_cpu = overall_containers[analytic]['CPU'] + const_overall_containers[analytic]['CPU']

        if analytic not in input_analytics:
            input_analytics.append(analytic)
        elif analytic not in duplic_analytics and add_cpu <= CPU:
            duplic_analytics.append(analytic)
            duplicate_num.append(2)
            overall_containers[analytic]['BW'] += const_overall_containers[analytic]['BW']
            overall_containers[analytic]['CPU'] = add_cpu
        elif add_cpu <= CPU:
            index = duplic_analytics.index(analytic)
            duplicate_num[index] += 1
            overall_containers[analytic]['BW'] += const_overall_containers[analytic]['BW']
            overall_containers[analytic]['CPU'] = add_cpu
        #else: print('duplicate and over CPU')

    # Reorder items by their ['BW']
    #print('input_analytics')
    #for analytic in input_analytics:
    #    print(analytic)
    # Reorder list
    input_analytics.sort(key=lambda a: overall_containers[a]['BW'], reverse=True)
    #print('reorder_analytics')
    #for analytic in input_analytics:
    #    print(analytic)


    # Debug Message
    #print('original_analytics')
    #for analytic in original_analytics:
    #    print(analytic)

    #print('input_analytics')
    #for analytic in input_analytics:
    #    print(analytic)

    #print('duplic_analytics')
    #for analytic in duplic_analytics:
    #    print(analytic)

    #print('duplicate_num')
    #for num in duplicate_num:
    #    print(num)

    #print('input_analytics')
    #for analytic in input_analytics:
    #    print(analytic)

    #print('overall_containers LAYER')
    #for container in overall_containers:
    #    print(container, overall_containers[container]['LAYER'])

    #print('overall_containers COMLAYER')
    #for container in overall_containers:
    #    print(container, overall_containers[container]['COMLAYER'])

    #print('overall_containers CPU')
    #for container in overall_containers:
    #    print(container, overall_containers[container]['CPU'])

    #print('overall_containers BW')
    #for container in overall_containers:
    #    print(container, overall_containers[container]['BW'])


    ### ----- Lagrangian Method ----- ###
    if method == 'Lagrange':
        local_e_a = lagrange(input_analytics, input_images_dict, total_size, bandwidth, CPU)
        download_analytics = []
        for i in range(len(input_analytics)):
            if local_e_a[i] == 1:
                download_analytics.append(input_analytics[i])
    ### ----- Dynamic Programming Method ----- ###
    elif method == 'DP':
        total_value, download_analytics = dynamic_program(input_analytics, len(input_analytics)-1, total_size, bandwidth, CPU)
    elif method == 'Greedy':
        print('Greedy')
        # Not implemented; download nothing
        download_analytics = []
    else:
        print('Algorithm: ' + method + ' is not a valid algorithm.')
        return []


    # Check the constraints
    solution_analytics = []
    size_constraint = 0
    bw_constraint = 0
    cpu_constraint = 0
    for analytic in download_analytics:
        exceed = False
        # How many replicas of this analytic were merged into one item
        # (integer division keeps range() below valid on Python 3)
        multiply = overall_containers[analytic]['BW']//const_overall_containers[analytic]['BW']
        new_size = size_constraint + overall_containers[analytic]['LAYER']
        new_bw = bw_constraint + overall_containers[analytic]['COMLAYER']
        new_cpu = cpu_constraint + overall_containers[analytic]['CPU']
        if new_size > total_size: continue
        if new_bw > bandwidth: continue
        if new_cpu > CPU: exceed = True
        if not exceed:
            solution_analytics.append(analytic)
            size_constraint = new_size
            bw_constraint = new_bw
            cpu_constraint = new_cpu
        elif multiply > 1:
            # Over CPU as a merged item: try to admit the replicas one by one
            for i in range(multiply):
                ### ----- Lagrange Need ----- ###
                if method == 'Lagrange':
                    new_cpu = cpu_constraint + const_overall_containers[analytic]['CPU']*1000*1000*10
                ### ----- DP Need ----- ###
                else:
                    new_cpu = cpu_constraint + const_overall_containers[analytic]['CPU']

                if new_cpu > CPU: continue
                if analytic not in solution_analytics:
                    solution_analytics.append(analytic)
                    duplicate_num[duplic_analytics.index(analytic)] = 1
                    cpu_constraint = new_cpu
                else:
                    duplicate_num[duplic_analytics.index(analytic)] += 1
                    cpu_constraint = new_cpu

        #print('size_constraint: ' + str(size_constraint))
        #print('bw_constraint: ' + str(bw_constraint))
        #print('cpu_constraint: ' + str(cpu_constraint))


    # Insert the original duplicate items
    answer_analytics = []
    for analytic in solution_analytics:
        answer_analytics.append(analytic)
        if analytic in duplic_analytics:
            number = duplicate_num[duplic_analytics.index(analytic)]
            for a in range(number-1):
                answer_analytics.append(analytic)

    saved_bandwidth = 0
    for analytic in answer_analytics:
        saved_bandwidth += overall_containers[analytic]['BW']
    print("saved bandwidth:", saved_bandwidth)

    # Debug message
    #print('Download analytics')
    #for analytic in download_analytics:
    #    print(analytic)
    #print('Solution analytics')
    #for analytic in solution_analytics:
    #    print(analytic)
    #print('Answer analytics')
    #for analytic in answer_analytics:
    #    print(analytic)

    return answer_analytics


def is_exist(image):
    # client.images.list() returns Image objects, so match the analytic
    # (e.g. 'audio1') against each image's repository tags instead of
    # comparing the bare string to the objects themselves
    client = docker.from_env()
    tag = overall_repo + ':s2-' + image[:-1] + '-' + image[-1]
    for img in client.images.list():
        if tag in img.tags:
            return True
    return False

def write_log(edge_analytics):
    name = []
    download_time = []
    access_num = []
    not_exist_num = 0
    f = open("/home/minion/YC/iscc19/Implementation/Algo/Replacement/images_download.log","r")
    for line in f:
        name.append(line.split(",")[0])
        download_time.append(line.split(",")[1])
        access_num.append(int(line.split(",")[2]))
    f.close()

    for analytic in edge_analytics:
        app = analytic[:-1]
        version = analytic[-1]
        analytic_name = overall_repo+":s2-"+app+"-"+version
        index = name.index(analytic_name)
        if is_exist(analytic):
            access_num[index] += 1
        else:
            not_exist_num += 1
            access_num[index] += 1
            download_time[index] = time.time()
        content = name[index]+','+str(download_time[index])+','+str(access_num[index])
        # Overwrite line index+1 of the log in place via sed
        os.popen('sed -i "'+str(index+1)+'c '+content+'" /home/minion/YC/iscc19/Implementation/Algo/Replacement/images_download.log')

    return not_exist_num
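
# Illustrative sketch only (not called by this module): the sed call above
# shells out once per updated line. Assuming the same comma-separated
# 'name,download_time,access_num' log format, the whole file could instead be
# rewritten in one pass in pure Python.
def rewrite_download_log(path, names, download_times, access_nums):
    lines = [names[i]+','+str(download_times[i])+','+str(access_nums[i])
             for i in range(len(names))]
    with open(path, 'w') as writer:
        writer.write('\n'.join(lines) + '\n')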

def read(filename):
    num = 0
    with open(filename) as f:
        analytics = f.readlines()
    # 'audio1-18' -> 'audio1' (drop the input index after the dash)
    analytics = [item.rstrip().split("-")[0] for item in analytics]
    num = len(analytics)
    return num, analytics

if __name__ == '__main__':

    registry_size = float(sys.argv[1])
    network_bandwidth = int(sys.argv[2])
    analytics_file = sys.argv[3]
    method = sys.argv[4]

    start_time = time.time()

    total_CPU = 400
    num, master_analytics = read(analytics_file)
    decision = download_algo(registry_size, network_bandwidth, total_CPU, master_analytics, method)

    edge = decision
    m_analytics = [a.split('-')[0] for a in master_analytics]

    for a in edge:
        m_analytics.remove(a)
    cloud = m_analytics

    print('Deploy to cloud>', ';'.join(cloud), ',Deploy at edge>', ';'.join(edge))

    if len(edge) > 0:
        delete_num = write_log([edge[0]])

    print('time: ' + str(time.time()-start_time))