#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Edge download decision algorithm.

Usage:
    python download_algo.py {registry_size} {bandwidth} {analytics} {method} [{alpha}]

    registry_size : local registry capacity in GB (default 6)
    bandwidth     : uplink bandwidth in Kbps (e.g. 176000 for 176 Mbps)
    analytics     : input file with one analytic per line, e.g. 'audio1-18'
    method        : decision algorithm name (e.g. DP for dynamic programming)
    alpha         : rounding factor for the approximation method (default 0.1)

images_dict structure:
    {
        "ImageName": {"LayerID": is_exist (int), ...},
        "ImageName": ...
    }
"""
__author__ = 'YuJung Wang'
__date__ = '2020/04'

import os
import sys
import time
import math
import json
# list.copy only exists since Python 3.3, so copy/deepcopy is used instead
import copy
import docker
import commands
import subprocess
# To get the same items in a list
import collections
# write log
from datetime import datetime
from itertools import takewhile
import paho.mqtt.publish as publish


# Length of one scheduling time step, in minutes.
Duration = 5

# Per-image layer bookkeeping: {"ImageName": {"LayerID": is_exist, ...}}.
# Twelve variants each of the yolo and audio analytics.
overall_images = {'%s%d' % (app, i): {}
                  for app in ('yolo', 'audio') for i in range(1, 13)}

# Static per-analytic resource profile. CPU (%), RAM (%), SIZE (GB) and the
# bandwidth value BW saved by hosting the analytic at the edge. The image
# sizes repeat with period 4 across the twelve variants of each family.
_YOLO_SIZES = (1.81, 2.25, 2.61, 2.96)
_AUDIO_SIZES = (2.03, 2.49, 2.85, 3.13)
const_overall_containers = {}
for _i in range(1, 13):
    const_overall_containers['yolo%d' % _i] = {
        "CPU": 55.6, "RAM": 13.08, "SIZE": _YOLO_SIZES[(_i - 1) % 4],
        "BW": 22.48, "COMLAYER": 0, "LAYER": 0,
    }
    const_overall_containers['audio%d' % _i] = {
        "CPU": 51.3, "RAM": 7.84, "SIZE": _AUDIO_SIZES[(_i - 1) % 4],
        "BW": 688.52, "COMLAYER": 0, "LAYER": 0,
    }

# Mutable working copy of the profiles; BW is accumulated for duplicated
# analytics and LAYER/COMLAYER are filled in per run.
overall_containers = copy.deepcopy(const_overall_containers)
overall_com_layers = {}
overall_layers = {}
overall_repo = os.environ['DOCKER_PROVIDER'] + '/' + os.environ['DOCKER_REPO']


def _size_to_bytes(value, unit):
    """Convert a numeric string plus SI unit ('GB'/'MB'/'KB', else bytes) to bytes."""
    scale = {'GB': 1000 ** 3, 'MB': 1000 ** 2, 'KB': 1000}.get(unit, 1)
    return float(value) * scale


def read_image_json(imgs_list, is_exist):
    """Load the layer metadata JSON for every image in imgs_list.

    Returns (images_dict, com_layers, layers) where images_dict maps
    image -> {layer_id: is_exist}, com_layers maps layer_id -> compressed
    size in bytes and layers maps layer_id -> uncompressed size in bytes.
    """
    images_dict = dict((img, {}) for img in imgs_list)
    com_layers = {}
    layers = {}
    for img in images_dict:
        if 'yolo' in img:
            img_name = 's2-yolo-' + img.split('yolo')[-1]
        elif 'audio' in img:
            img_name = 's2-audio-' + img.split('audio')[-1]
        else:
            # FIX: previously an unknown image family reused a stale img_name
            # from the prior iteration (or raised NameError); skip it instead.
            continue
        with open('/home/minion/YC/iscc19/Implementation/Algo/Download/image_' + img_name + '.json', 'r') as reader:
            jf = json.loads(reader.read())
        for l in jf['Layers']:
            if l['LayerID'] not in com_layers:
                # CompressLayerSize looks like "12.3 MB"; store bytes.
                com_size, unit = l['CompressLayerSize'].split(' ', 1)
                com_layers[l['LayerID']] = _size_to_bytes(com_size, unit)
            images_dict[img][l['LayerID']] = is_exist
            # LayerSize is already plain bytes.
            layers[l['LayerID']] = float(l['LayerSize'])
    return images_dict, com_layers, layers


def get_exist_images():
    """Return name+version tokens (e.g. 'yolo3') of local images of overall_repo."""
    exist_images = []
    client = docker.from_env()
    for image in client.images.list(name=overall_repo):
        # str(image) looks like "<Image: 'repo:s2-yolo-3', ...>"; strip the
        # decoration and keep only the tag list.
        newstr = str(image).replace("<", "").replace(">", "").replace("'", "")
        img = newstr.split(' ', 1)[1]
        tags = img.split(', ') if ', ' in img else [img]
        for tag in tags:
            tail = tag.split(':', -1)[-1]
            # 's2-yolo-3' -> 'yolo' + '3' -> 'yolo3'
            exist_images.append(tail.split('-')[1] + tail.split('-')[-1])
    return exist_images


def get_running_images(analytics):
    """Map running container IDs to their analytic names (e.g. 'audio2')."""
    images = []
    client = docker.from_env()
    for container in analytics:
        a = client.containers.get(container)
        newstr = str(a.image).replace("<", "").replace(">", "").replace("'", "")
        repo, app, vers = newstr.split(' ')[1].split('-', 2)
        images.append(app + vers)
    return images


def get_containers():
    """Return IDs of running k8s analytic containers (audio/object detection).

    NOTE(review): shell=True with a fixed command string; no untrusted input
    reaches the shell, but prefer a list argv if this is ever parameterized.
    """
    analytics = []
    cmd = 'docker ps| grep k8s'
    k8s_containers_info = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT).decode('utf-8')
    for line in k8s_containers_info.split('\n')[:-1]:
        infos = line.split()
        if 'k8s_audio-recognition' in infos[-1] or 'k8s_object-detection' in infos[-1]:
            analytics.append(infos[0])
    return analytics


def get_unreplace_layers(exist_images_list):
    """Read the replacement log and report partially-replaced image layers.

    Returns (images_dict, com_layers, layers) in the same shape as
    read_image_json(); a layer value of 1 means it is still present locally.
    Rewrites the log when entries became fully existing images.
    """
    unreplaced_images = []
    replaced_layer_nums = []
    is_write = False
    with open('/home/minion/YC/iscc19/Implementation/Algo/Replacement/images_replace.log', 'r') as reader:
        for line in reader.readlines():
            # An empty line terminates the log.
            if line.strip() == '':
                break
            rep_img, num_layers = line.rstrip().split(',')
            rep_img = rep_img.split(':')[-1]
            unreplaced_images.append(rep_img.split('-')[1] + rep_img.split('-')[-1])
            replaced_layer_nums.append(int(num_layers))
    # Drop entries whose image now exists locally.
    # FIX: the original removed elements from unreplaced_images while
    # iterating it, which skips the element after every removal; filter
    # into fresh lists instead.
    kept_images = []
    kept_nums = []
    for img, num in zip(unreplaced_images, replaced_layer_nums):
        if img in exist_images_list:
            is_write = True
        else:
            kept_images.append(img)
            kept_nums.append(num)
    unreplaced_images = kept_images
    replaced_layer_nums = kept_nums
    # Read each image's metadata JSON to recover per-layer information.
    images_dict = dict((img, {}) for img in unreplaced_images)
    com_layers = {}
    layers = {}
    for img in images_dict:
        if 'yolo' in img:
            img_name = 's2-yolo-' + img.split('yolo')[-1]
        elif 'audio' in img:
            img_name = 's2-audio-' + img.split('audio')[-1]
        if overall_repo in img:
            img_name = img.split(':')[-1]
        with open('/home/minion/YC/iscc19/Implementation/Algo/Download/image_' + img_name + '.json', 'r') as reader:
            reads = reader.read().replace('\n', '').replace(' ', '').split('[{', 1)[1]
        parts = reads.split('},{')
        # Index of the last layer that is still present locally.
        max_l_num = len(parts) - replaced_layer_nums[unreplaced_images.index(img)] - 1
        for i in range(0, len(parts)):
            com_size_str = parts[i].split('"CompressLayerSize":"', 1)[1].split('"', 1)[0]
            size = parts[i].split('"LayerSize":"', 1)[1].split('"', 1)[0]
            l_id = parts[i].split('"LayerID":"', 1)[1].split('"')[0]
            if l_id not in com_layers:
                # Whitespace was stripped above, so the unit is glued to the
                # number, e.g. "12.3MB" or "123B".
                unit = com_size_str[-2:]
                if unit in ('GB', 'MB', 'KB'):
                    com_layers[l_id] = _size_to_bytes(com_size_str[:-2], unit)
                else:  # plain bytes, e.g. "123B"
                    com_layers[l_id] = float(com_size_str[:-1])
            # Layers at or before max_l_num still exist (1); later ones don't.
            images_dict[img][l_id] = 1 if i <= max_l_num else 0
            layers[l_id] = float(size)
    # Write the log back only if some unreplaced image became fully existing.
    if is_write:
        with open('/home/minion/YC/iscc19/Implementation/Algo/Replacement/images_replace.log', 'w') as writer:
            for image in unreplaced_images:
                if 'yolo' in image:
                    writer.write(overall_repo + ':s2-yolo-' + image.split('yolo')[-1] + ',' + str(replaced_layer_nums[unreplaced_images.index(image)]) + '\n')
                elif 'audio' in image:
                    writer.write(overall_repo + ':s2-audio-' + image.split('audio')[-1] + ',' + str(replaced_layer_nums[unreplaced_images.index(image)]) + '\n')
    return images_dict, com_layers, layers


def get_layer_size(container, layers):
    """Sum the bytes of the layers marked missing (0) into the container entry.

    Fills container['LAYER'] (uncompressed) and container['COMLAYER']
    (compressed) from the module-level overall_layers / overall_com_layers.
    """
    layer_size = 0
    com_layer_size = 0
    for l in layers:
        if not layers[l]:
            layer_size += overall_layers[l]
            com_layer_size += overall_com_layers[l]
    container['LAYER'] = layer_size
    container['COMLAYER'] = com_layer_size


def dynamic_program(input_analytics, n, D):
    """0/1 knapsack over items 0..n with capacity D, by plain recursion.

    Item weight: compressed bytes still to download (COMLAYER).
    Item value : bandwidth saved by hosting the analytic at the edge (BW).
    Returns (best_total_value, chosen_analytics).
    """
    # FIX: check the base case before indexing; the original read
    # input_analytics[n] first, silently using element -1 when n < 0.
    if D <= 0 or n < 0:
        return 0, []
    size = overall_containers[input_analytics[n]]['COMLAYER']
    value = overall_containers[input_analytics[n]]['BW']
    if size > D:
        # Item cannot fit at all: skip it.
        return dynamic_program(input_analytics, n - 1, D)
    skip_value, skip_set = dynamic_program(input_analytics, n - 1, D)
    take_value, take_set = dynamic_program(input_analytics, n - 1, D - size)
    if skip_value >= take_value + value:
        return skip_value, skip_set
    take_set.append(input_analytics[n])
    return take_value + value, take_set
def greedy(input_analytics, D):
    """Greedy knapsack: pick analytics by descending value density until D is spent.

    Density is BW divided by the download size normalized to the budget D;
    a zero-size analytic is treated as maximally dense (BW * D).
    """
    def _density(name):
        entry = overall_containers[name]
        frac = entry['COMLAYER'] / D
        if frac == 0:
            return entry['BW'] * D
        return entry['BW'] / frac

    # Highest density first; sorted() is stable so ties keep input order.
    ordered = sorted(input_analytics, key=_density, reverse=True)

    chosen = []
    remaining = D
    for name in ordered:
        cost = overall_containers[name]['COMLAYER']
        if cost <= remaining:
            chosen.append(name)
            remaining -= cost
        elif remaining <= 0:
            break
    return chosen


def roundoff(input_analytics, alpha):
    """Scale every analytic's BW down by k = alpha * max_BW / n (FPTAS rounding)."""
    global overall_containers
    peak = max(overall_containers[a]['BW'] for a in input_analytics)
    k = (peak * alpha) / len(input_analytics)
    for a in input_analytics:
        overall_containers[a]['BW'] = math.floor(overall_containers[a]['BW'] / k)
def download_algo(total_size, bandwidth, original_analytics, method, alpha):
    """Choose which analytics to download to the edge within storage/bandwidth.

    total_size         : registry capacity in GB.
    bandwidth          : uplink bandwidth in Kbps.
    original_analytics : requested analytic names, possibly with duplicates.
    method             : 'DP', 'FPTAS' or 'Greedy'.
    alpha              : rounding factor for FPTAS.
    Returns the list of analytics to host at the edge (with duplicates
    restored), or [] for an unknown method. Prints the saved bandwidth.
    """
    ## ----- Initialization ----- ##
    # storage GB -> B
    total_size *= 1000 * 1000 * 1000
    # bandwidth Kbps -> bits available during one Duration-minute time step
    bandwidth *= 1000 * Duration * 60

    global overall_containers
    # Layer inventory: local images, requested images, unreplaced leftovers.
    exist_images_list = get_exist_images()
    exist_images_dict, exist_com_layers_dict, exist_layers_dict = read_image_json(exist_images_list, 1)
    # Requested images; layers optimistically assumed existing, fixed below.
    input_images_dict, input_com_layers_dict, input_layers_dict = read_image_json(original_analytics, 1)
    global overall_layers
    overall_layers = input_layers_dict
    global overall_com_layers
    overall_com_layers = input_com_layers_dict
    unreplace_images_dict, unreplace_com_layers_dict, unreplace_layers_dict = get_unreplace_layers(exist_images_list)

    # Mark each requested layer: existing locally, partially replaced, or missing.
    for img in input_images_dict:
        for lay in input_images_dict[img]:
            try:
                overall_com_layers[lay] = exist_com_layers_dict[lay]
            # FIX: was a bare `except:`; only a missing key means "layer
            # not local" — anything else should propagate.
            except KeyError:
                input_images_dict[img][lay] = 0
                if img in unreplace_images_dict:
                    input_images_dict[img][lay] = unreplace_images_dict[img][lay]
                else:
                    for image in unreplace_images_dict:
                        if lay in unreplace_images_dict[image]:
                            input_images_dict[img][lay] = unreplace_images_dict[image][lay]

    # Per-analytic bytes still to download.
    for analytic in original_analytics:
        get_layer_size(overall_containers[analytic], input_images_dict[analytic])

    # Available storage = capacity minus every layer already on disk
    # (existing images plus still-present unreplaced layers); compressed
    # sizes are used as the common measure.
    overall_using_layers = copy.deepcopy(exist_com_layers_dict)
    for img in unreplace_images_dict:
        for lay in unreplace_images_dict[img]:
            if lay not in overall_using_layers and unreplace_images_dict[img][lay]:
                overall_using_layers[lay] = unreplace_com_layers_dict[lay]
    total_size -= sum(overall_using_layers.values())
    # Single budget D: the tighter of storage and per-step bandwidth.
    D = min(total_size, bandwidth)

    # Deduplicate the request list; a duplicated analytic doubles its BW value.
    input_analytics = []
    duplic_analytics = []
    duplicate_num = []
    for analytic in original_analytics:
        if analytic not in input_analytics:
            input_analytics.append(analytic)
        elif analytic not in duplic_analytics:
            duplic_analytics.append(analytic)
            duplicate_num.append(2)
            overall_containers[analytic]['BW'] += const_overall_containers[analytic]['BW']
    # Highest value first.
    input_analytics.sort(key=lambda a: overall_containers[a]['BW'], reverse=True)

    ### ----- Solve the knapsack with the requested method ----- ###
    if method == 'DP':
        total_value, download_analytics = dynamic_program(input_analytics, len(input_analytics) - 1, D)
    elif method == 'FPTAS':
        roundoff(input_analytics, alpha)
        total_value, download_analytics = dynamic_program(input_analytics, len(input_analytics) - 1, D)
    elif method == 'Greedy':
        download_analytics = greedy(input_analytics, D)
    else:
        print('Algorithm: ' + method + ' is not a valid algorithm.')
        return []

    # Re-check the size budget over the chosen set.
    solution_analytics = []
    size_constraint = 0
    for analytic in download_analytics:
        # The CPU check was removed, so `exceed` is currently always False;
        # kept so the duplicate-handling branch survives a future CPU limit.
        exceed = False
        multiply = overall_containers[analytic]['BW'] / const_overall_containers[analytic]['BW']
        new_size = size_constraint + overall_containers[analytic]['COMLAYER']
        if new_size > D:
            continue
        if not exceed:
            solution_analytics.append(analytic)
            size_constraint = new_size
        elif multiply > 1:
            # FIX: multiply is a float ratio; range() needs an int.
            for i in range(int(multiply)):
                if analytic not in solution_analytics:
                    solution_analytics.append(analytic)
                    duplicate_num[duplic_analytics.index(analytic)] = 1
                else:
                    duplicate_num[duplic_analytics.index(analytic)] += 1

    # Re-expand duplicates so the answer mirrors the original request counts.
    answer_analytics = []
    for analytic in solution_analytics:
        answer_analytics.append(analytic)
        if analytic in duplic_analytics:
            number = duplicate_num[duplic_analytics.index(analytic)]
            for a in range(number - 1):
                answer_analytics.append(analytic)

    saved_bandwidth = sum(overall_containers[a]['BW'] for a in answer_analytics)
    # FIX: was a Python-2-only `print` statement; output text unchanged.
    print('saved bandwidth: ' + str(saved_bandwidth))

    return answer_analytics


def is_exist(image):
    """Return True if `image` is in the local image list for overall_repo.

    NOTE(review): this compares a name string against docker Image objects,
    so it likely always returns False — confirm the intended comparison.
    """
    client = docker.from_env()
    images = client.images.list(name=overall_repo)
    if image in images:
        return True
    return False
def read(filename):
    """Read one analytic per line ('name-version'); return (count, [name, ...])."""
    with open(filename) as f:
        analytics = [line.rstrip().split("-")[0] for line in f]
    return len(analytics), analytics


if __name__ == '__main__':

    registry_size = float(sys.argv[1])      # GB
    network_bandwidth = int(sys.argv[2])    # Kbps
    analytics_file = sys.argv[3]
    method = sys.argv[4]
    # Optional fifth argument: FPTAS rounding factor.
    if len(sys.argv) < 6:
        alpha = 0.1
    else:
        alpha = float(sys.argv[5])

    start_time = time.time()

    num, master_analytics = read(analytics_file)
    decision = download_algo(registry_size, network_bandwidth, master_analytics, method, alpha)

    # Everything chosen runs at the edge; the remainder goes to the cloud.
    edge = decision
    m_analytics = [a.split('-')[0] for a in master_analytics]
    for a in edge:
        m_analytics.remove(a)
    cloud = m_analytics

    # FIX: was a Python-2-only `print` statement; output text unchanged.
    print('Deploy to cloud> %s ,Deploy at edge> %s' % (';'.join(cloud), ';'.join(edge)))

    print('time: ' + str(time.time() - start_time))