import errno
import json
import logging
import os
import re
import socket
import subprocess
import sys
from contextlib import contextmanager
from datetime import (
    datetime,
    timedelta,
)
from time import (
    sleep,
    time,
)

from jujupy.utility import (
    ensure_deleted,
    ensure_dir,
    get_timeout_path,
    get_unit_public_ip,
    is_ipv6_address,
    print_now,
    qualified_model_name,
    quote,
    scoped_environ,
    skip_on_missing_file,
    temp_dir,
    temp_yaml_file,
    until_timeout
)

# Imported for other call sites to use.
__all__ = [
    'ensure_deleted',
    'ensure_dir',
    'get_timeout_path',
    'get_unit_public_ip',
    'qualified_model_name',
    'quote',
    'scoped_environ',
    'skip_on_missing_file',
    'temp_dir',
    'temp_yaml_file',
]

# Equivalent of socket.EAI_NODATA when using windows sockets
# <https://msdn.microsoft.com/ms740668#WSANO_DATA>
WSANO_DATA = 11004

TEST_MODEL = 'test-tmp-env'

log = logging.getLogger("utility")


class PortTimeoutError(Exception):
    """Raised when a port fails to reach the desired state in time."""


class LoggedException(BaseException):
    """Raised in place of an exception that has already been logged.

    This is a wrapper to avoid double-printing real Exceptions while still
    unwinding the stack appropriately.
    """

    def __init__(self, exception):
        self.exception = exception


class JujuAssertionError(AssertionError):
    """Exception for juju assertion failures."""


def _clean_dir(maybe_dir):
    """Pseudo-type that validates an argument to be a clean directory path.

    For safety, this function will not attempt to remove existing directory
    contents but will just report a warning.
    """
    try:
        contents = os.listdir(maybe_dir)
    except OSError as e:
        if e.errno == errno.ENOENT:
            # we don't raise this error due to tests abusing /tmp/logs
            logging.warning('Not a directory {}'.format(maybe_dir))
        if e.errno == errno.EEXIST:
            logging.warning('Directory {} already exists'.format(maybe_dir))
    else:
        if contents and contents != ["empty"]:
            logging.warning(
                'Directory {!r} has existing contents.'.format(maybe_dir))
    return maybe_dir


def as_literal_address(address):
    """Returns address in form suitable for embedding in URL or similar.

    In practice, this just puts square brackets round IPv6 addresses which
    avoids conflict with port separators and other uses of colons.
    """
    if is_ipv6_address(address):
        return address.join("[]")
    return address


def wait_for_port(host, port, closed=False, timeout=30):
    """Block until the given TCP port on host is open (or closed).

    :param host: Hostname or IP address to probe.
    :param port: TCP port number to probe.
    :param closed: If True, wait for the port to be closed rather than open.
    :param timeout: Seconds to keep retrying before giving up.
    :raises PortTimeoutError: if the port does not reach the desired state
        within timeout.
    """
    family = socket.AF_INET6 if is_ipv6_address(host) else socket.AF_INET
    for remaining in until_timeout(timeout):
        try:
            addrinfo = socket.getaddrinfo(host, port, family,
                                          socket.SOCK_STREAM)
        except socket.error as e:
            # No-data lookup results count as "closed"; anything else is
            # a real resolver failure.
            if e.errno not in (socket.EAI_NODATA, WSANO_DATA):
                raise
            if closed:
                return
            else:
                continue
        sockaddr = addrinfo[0][4]
        # Treat Azure messed-up address lookup as a closed port.
        if sockaddr[0] == '0.0.0.0':
            if closed:
                return
            else:
                continue
        conn = socket.socket(*addrinfo[0][:3])
        # Give each connect attempt at least 5 seconds, even near timeout.
        conn.settimeout(max(remaining or 0, 5))
        try:
            conn.connect(sockaddr)
        except socket.timeout:
            if closed:
                return
        except socket.error as e:
            # Connection-refused/unreachable style errors mean "closed".
            if e.errno not in (errno.ECONNREFUSED, errno.ENETUNREACH,
                               errno.ETIMEDOUT, errno.EHOSTUNREACH):
                raise
            if closed:
                return
        except socket.gaierror as e:
            print_now(str(e))
        except Exception as e:
            print_now('Unexpected {!r}: {}'.format(type(e), e))
            raise
        else:
            # Connected successfully: port is open.
            conn.close()
            if not closed:
                return
            sleep(1)
    raise PortTimeoutError('Timed out waiting for port.')


def get_revision_build(build_info):
    """Return the 'revision_build' parameter from Jenkins build info, if any.

    :param build_info: Decoded Jenkins build-info JSON dict.
    :return: The parameter value, or None when not present.
    """
    for action in build_info['actions']:
        if 'parameters' in action:
            for parameter in action['parameters']:
                if parameter['name'] == 'revision_build':
                    return parameter['value']


def get_winrm_certs():
    """Returns locations of key and cert files for winrm in cloud-city."""
    home = os.environ['HOME']
    return (
        os.path.join(home, 'cloud-city/winrm_client_cert.key'),
        os.path.join(home, 'cloud-city/winrm_client_cert.pem'),
    )


def s3_cmd(params, drop_output=False):
    """Run s3cmd using the cloud-city juju-qa config.

    :param params: Extra command-line arguments for s3cmd.
    :param drop_output: If True, discard stdout; otherwise return it.
    """
    s3cfg_path = os.path.join(
        os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
    command = ['s3cmd', '-c', s3cfg_path, '--no-progress'] + params
    if drop_output:
        # Close the sink explicitly rather than leaking the file handle.
        with open(os.devnull, 'w') as devnull:
            return subprocess.check_call(command, stdout=devnull)
    else:
        return subprocess.check_output(command)


def _get_test_name_from_filename():
    """Derive the test name from the calling script's filename.

    Falls back to 'unknown_test' when the frame inspection fails.
    """
    try:
        calling_file = sys._getframe(2).f_back.f_globals['__file__']
        return os.path.splitext(os.path.basename(calling_file))[0]
    except Exception:
        return 'unknown_test'


def generate_default_clean_dir(temp_env_name):
    """Creates a new unique directory for logging and returns name"""
    logging.debug('Environment {}'.format(temp_env_name))
    test_name = temp_env_name.split('-')[0]
    timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
    log_dir = os.path.join('/tmp', test_name, 'logs', timestamp)

    try:
        os.makedirs(log_dir)
        logging.info('Created logging directory {}'.format(log_dir))
    except OSError as e:
        if e.errno == errno.EEXIST:
            logging.warning('Directory {} already exists'.format(log_dir))
        else:
            # Re-raise the original OSError; raising a plain string (as the
            # old code did) is a TypeError in Python 3.
            logging.error(
                'Failed to create logging directory: {}. '
                'Please specify empty folder or try again'.format(log_dir))
            raise
    return log_dir


def _generate_default_temp_env_name():
    """Creates a new unique name for environment and returns the name"""
    # we need to sanitize the name
    timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
    test_name = re.sub('[^a-zA-Z]', '', _get_test_name_from_filename())
    return '{}-{}-temp-env'.format(test_name, timestamp)


def _to_deadline(timeout):
    """Convert a timeout in seconds to an absolute UTC deadline."""
    return datetime.utcnow() + timedelta(seconds=int(timeout))


def add_arg_juju_bin(parser):
    """Add the optional juju_bin positional argument to parser."""
    parser.add_argument('juju_bin', nargs='?',
                        help='Full path to the Juju binary. By default, this'
                        ' will use $PATH/juju',
                        default=None)


def add_basic_testing_arguments(
        parser, using_jes=False, deadline=True, env=True, existing=True):
    """Returns the parser loaded with basic testing arguments.

    The basic testing arguments, used in conjunction with boot_context ensures
    a test can be run in any supported substrate in parallel.

    This helper adds 4 positional arguments that defines the minimum needed
    to run a test script.

    These arguments (env, juju_bin, logs, temp_env_name) allow you to specify
    specifics for which env, juju binary, which folder for logging and an
    environment name for your test respectively.

    There are many optional args that either update the env's config or
    manipulate the juju command line options to test in controlled situations
    or in uncommon substrates: --debug, --verbose, --agent-url, --agent-stream,
    --series, --bootstrap-host, --machine, --keep-env. If not using_jes, the
    --upload-tools arg will also be added.

    :param parser: an ArgumentParser.
    :param using_jes: whether args should be tailored for JES testing.
    :param deadline: If true, support the --timeout option and convert to a
        deadline.
    :param env: If true, add the optional 'env' positional argument.
    :param existing: If true will supply the 'existing' argument to allow
        running on an existing bootstrapped controller.
    """

    # Optional positional arguments
    if env:
        parser.add_argument(
            'env', nargs='?',
            help='The juju environment to base the temp test environment on.',
            default='lxd')
    add_arg_juju_bin(parser)
    parser.add_argument('logs', nargs='?', type=_clean_dir,
                        help='A directory in which to store logs. By default,'
                        ' this will use the current directory',
                        default=None)
    parser.add_argument('temp_env_name', nargs='?',
                        help='A temporary test environment name. By default, '
                        ' this will generate an enviroment name using the'
                        ' timestamp and testname. '
                        ' test_name_timestamp_temp_env',
                        default=_generate_default_temp_env_name())

    # Optional keyword arguments.
    parser.add_argument('--debug', action='store_true',
                        help='Pass --debug to Juju.')
    parser.add_argument('--verbose', action='store_const',
                        default=logging.INFO, const=logging.DEBUG,
                        help='Verbose test harness output.')
    parser.add_argument('--region', help='Override environment region.')
    parser.add_argument('--to', default=None,
                        help='Place the controller at a location.')
    parser.add_argument('--agent-url', action='store', default=None,
                        help='URL for retrieving agent binaries.')
    parser.add_argument('--agent-stream', action='store', default=None,
                        help='Stream for retrieving agent binaries.')
    parser.add_argument('--series', action='store', default=None,
                        help='Name of the Ubuntu series to use.')
    parser.add_argument('--arch', action='store', default=None,
                        help='Name of the architecture to use.')
    if not using_jes:
        parser.add_argument('--upload-tools', action='store_true',
                            help='upload local version of tools to bootstrap.')
    parser.add_argument('--bootstrap-host',
                        help='The host to use for bootstrap.')
    parser.add_argument('--machine', help='A machine to add or when used with '
                        'KVM based MaaS, a KVM image to '
                        'start.',
                        action='append', default=[])
    parser.add_argument('--keep-env', action='store_true',
                        help='Keep the Juju environment after the test'
                        ' completes.')
    parser.add_argument('--logging-config',
                        help="Override logging configuration for a "
                        "deployment.",
                        default="<root>=INFO;unit=INFO")
    parser.add_argument('--juju-home', help="Directory of juju home. It is not"
                        " used during integration test "
                        "runs. One can override this arg "
                        "for local runs.", default=None)

    if existing:
        parser.add_argument(
            '--existing',
            action='store',
            default=None,
            const='current',
            nargs='?',
            help='Test using an existing bootstrapped controller. '
                 'If no controller name is provided defaults to using the '
                 'current selected controller.')
    if deadline:
        parser.add_argument('--timeout', dest='deadline', type=_to_deadline,
                            help="The script timeout, in seconds.")
    return parser


# suppress nosetests
add_basic_testing_arguments.__test__ = False


def configure_logging(log_level, logger=None):
    """Configure basic logging, optionally updating a specific logger too.

    :param log_level: Level (e.g. logging.DEBUG) for the root configuration.
    :param logger: Optional logger whose handlers are also reconfigured.
    """
    # 'fmt' rather than 'format' to avoid shadowing the builtin.
    fmt = '%(asctime)s %(levelname)s %(message)s'
    datefmt = '%Y-%m-%d %H:%M:%S'
    logging.basicConfig(
        level=log_level, format=fmt,
        datefmt=datefmt)
    if logger:
        formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)
        for handler in logger.handlers:
            handler.setLevel(log_level)
            handler.setFormatter(formatter)


def get_candidates_path(root_dir):
    """Return the path of the 'candidate' directory under root_dir."""
    return os.path.join(root_dir, 'candidate')


# GZ 2015-10-15: Paths returned in filesystem dependent order, may want sort?
def find_candidates(root_dir, find_all=False):
    """Iterate the candidate directory paths under root_dir."""
    return (path for path, buildvars in _find_candidates(root_dir, find_all))


def find_latest_branch_candidates(root_dir):
    """Return a list of one candidate per branch.

    :param root_dir: The root directory to find candidates from.
    """
    candidates = []
    for path, buildvars_path in _find_candidates(root_dir, find_all=False,
                                                 artifacts=True):
        with open(buildvars_path) as buildvars_file:
            buildvars = json.load(buildvars_file)
        candidates.append(
            (buildvars['branch'], int(buildvars['revision_build']), path))
    # Sorting ascending means the last (highest revision_build) entry per
    # branch wins in the dict.
    latest = dict(
        (branch, (path, build)) for branch, build, path in sorted(candidates))
    return latest.values()


def _find_candidates(root_dir, find_all=False, artifacts=False):
    """Yield (candidate_path, buildvars_path) pairs under root_dir.

    :param find_all: If False, skip candidates older than a week.
    :param artifacts: If True, select '-artifacts' directories only;
        otherwise exclude them.
    """
    candidates_path = get_candidates_path(root_dir)
    a_week_ago = time() - timedelta(days=7).total_seconds()
    for candidate_dir in os.listdir(candidates_path):
        if candidate_dir.endswith('-artifacts') != artifacts:
            continue
        candidate_path = os.path.join(candidates_path, candidate_dir)
        buildvars = os.path.join(candidate_path, 'buildvars.json')
        try:
            stat = os.stat(buildvars)
        except OSError as e:
            # Skip directories without a buildvars.json.
            if e.errno in (errno.ENOENT, errno.ENOTDIR):
                continue
            raise
        if not find_all and stat.st_mtime < a_week_ago:
            continue
        yield candidate_path, buildvars


def get_deb_arch():
    """Get the debian machine architecture."""
    return subprocess.check_output(['dpkg', '--print-architecture']).strip()


def extract_deb(package_path, directory):
    """Extract a debian package to a specified directory."""
    subprocess.check_call(['dpkg', '-x', package_path, directory])


def run_command(command, dry_run=False, verbose=False):
    """Optionally execute a command and maybe print the output."""
    if verbose:
        print_now('Executing: {}'.format(command))
    if not dry_run:
        output = subprocess.check_output(command)
        if verbose:
            print_now(output)


def log_and_wrap_exception(logger, exc):
    """Record exc details to logger and return wrapped in LoggedException."""
    logger.exception(exc)
    stdout = getattr(exc, 'output', None)
    stderr = getattr(exc, 'stderr', None)
    if stdout or stderr:
        logger.info('Output from exception:\nstdout:\n%s\nstderr:\n%s',
                    stdout, stderr)
    return LoggedException(exc)


@contextmanager
def logged_exception(logger):
    """\
    Record exceptions in managed context to logger and reraise LoggedException.

    Note that BaseException classes like SystemExit, GeneratorExit and
    LoggedException itself are not wrapped, except for KeyboardInterrupt.
    """
    try:
        yield
    except (Exception, KeyboardInterrupt) as e:
        raise log_and_wrap_exception(logger, e)


def assert_dict_is_subset(sub_dict, super_dict):
    """Assert that every item in the sub_dict is in the super_dict.

    :raises JujuAssertionError: when sub_dict items are missing.
    :return: True when when sub_dict is a subset of super_dict
    """
    if not is_subset(sub_dict, super_dict):
        raise JujuAssertionError(
            'Found: {} \nExpected: {}'.format(super_dict, sub_dict))
    return True


def is_subset(subset, superset):
    """ Recursively check that subset is indeed a subset of superset """
    if isinstance(subset, dict):
        return all(
            key in superset and is_subset(val, superset[key])
            for key, val in subset.items())
    if isinstance(subset, (list, set)):
        # Every subitem must match at least one superset item.
        return all(
            any(is_subset(subitem, superitem) for superitem in superset)
            for subitem in subset)
    return subset == superset


def add_model(client):
    """Adds a model to the current juju environment then destroys it.

    Will raise an exception if the Juju does not deselect the current model.
    :param client: Jujupy ModelClient object
    """
    log.info('Adding model "{}" to current controller'.format(TEST_MODEL))
    new_client = client.add_model(TEST_MODEL)
    new_model = get_current_model(new_client)
    if new_model == TEST_MODEL:
        log.info('Current model and newly added model match')
    else:
        error = ('Juju failed to switch to new model after creation. '
                 'Expected {} got {}'.format(TEST_MODEL, new_model))
        raise JujuAssertionError(error)
    return new_client


def get_current_model(client):
    """Gets the current model from Juju's list-models command.

    :param client: Jujupy ModelClient object
    :return: String name of current model
    """
    raw = list_models(client)
    try:
        return raw['current-model']
    except KeyError:
        log.warning('No model is currently selected.')
        return None


def list_models(client):
    """List models.
    :param client: Jujupy ModelClient object
    :return: Dict of list-models command
    """
    try:
        raw = client.get_juju_output('list-models', '--format', 'json',
                                     include_e=False)
    except subprocess.CalledProcessError as e:
        log.error('Failed to list current models due to error: {}'.format(e))
        # Bare raise preserves the original traceback.
        raise
    return json.loads(raw)


def is_subordinate(app_data):
    """Return True when status app info describes a subordinate application."""
    return ('unit' not in app_data) and ('subordinate-to' in app_data)


def application_machines_from_app_info(app_data):
    """Get all the machines used to host the given application from the
    application info in status.

    :param app_data: application info from status
    """
    machines = [unit_data['machine'] for unit_data in
                app_data['units'].values()]
    return machines


def subordinate_machines_from_app_info(app_data, apps):
    """Get the subordinate machines from a given application from the
    application info in status.

    :param app_data: application info from status
    :param apps: mapping of application name to application info from status
    """
    machines = []
    for sub_name in app_data['subordinate-to']:
        for app_name, prim_app_data in apps.items():
            if sub_name == app_name:
                machines.extend(application_machines_from_app_info(
                    prim_app_data))
    return machines


def align_machine_profiles(machine_profiles):
    """Align machine profiles will create a dict from a list of machine
    ensuring that the machines are unique to each charm profile name.

    :param machine_profiles: is a list of machine profiles tuple
    """
    result = {}
    for items in machine_profiles:
        charm_profile = items[0]
        if charm_profile in result:
            # drop duplicates using set difference
            a = set(result[charm_profile])
            b = set(items[1])
            result[charm_profile].extend(b.difference(a))
        else:
            result[charm_profile] = list(items[1])
    return result