diff --git a/strip-harbor/sync/MailerSendNewApiClientAPI.py b/strip-harbor/sync/MailerSendNewApiClientAPI.py
index 189cfd6..822097e 100644
--- a/strip-harbor/sync/MailerSendNewApiClientAPI.py
+++ b/strip-harbor/sync/MailerSendNewApiClientAPI.py
@@ -21,7 +21,7 @@ def __init__(
         self.mailersend_api_key = env("MAILERSEND_API_KEY")
         if not self.mailersend_api_key:
             self.mailersend_api_key=mailersend_api_key
-        self.headers_auth = "Bearer {}".format(self.mailersend_api_key)
+        self.headers_auth = f"Bearer {self.mailersend_api_key}"
         self.headers_default = {
             "Content-Type": "application/json",
             "X-Requested-With": "XMLHttpRequest",
diff --git a/strip-harbor/sync/harbor.py b/strip-harbor/sync/harbor.py
index 6101de8..bd08cb7 100644
--- a/strip-harbor/sync/harbor.py
+++ b/strip-harbor/sync/harbor.py
@@ -11,7 +11,7 @@
 harbor_endpoint = 'https://' + env('HARBOR_HOST') + '/api/v2.0'
 harbor_projects_path = '/projects/'+env('HARBOR_PROJECT_ID')
 harbor_project_path = '/project/'+env('HARBOR_PROJECT_ID')
-harbor_robots_path = harbor_projects_path + '/robots'
+harbor_robots_path = f'{harbor_projects_path}/robots'
 
 def harbor_get(path):
     return requests.get(
@@ -20,7 +20,7 @@ def harbor_get(path):
         headers={'Accept':'application/json'})
 
 def harbor_post(path, data):
-    log.warning("UserName --> %s" % env('HARBOR_USERNAME'))
+    log.warning(f"UserName --> {env('HARBOR_USERNAME')}")
     return requests.post(
         harbor_endpoint + path,
         data=data,
@@ -34,16 +34,28 @@ def get_robot_accounts_for_project():
 
 def create_robot_account_for_project(account_name,email,customer_name):
     account = harbor_post(
-        harbor_robots_path,
-        json.dumps({
-            'name':account_name,
-            'expires_at': int((datetime.datetime.now() + datetime.timedelta(days=30)).timestamp()),
+        harbor_robots_path,
+        json.dumps(
+            {
+                'name': account_name,
+                'expires_at': int(
+                    (
+                        datetime.datetime.now() + datetime.timedelta(days=30)
+                    ).timestamp()
+                ),
                 'access': [
-                {'resource':harbor_project_path+'/repository','action':'pull'},
-                {'resource':harbor_project_path+'/helm-chart-version','action':'read'}
-            ],
-        })
-    )
+                    {
+                        'resource': f'{harbor_project_path}/repository',
+                        'action': 'pull',
+                    },
+                    {
+                        'resource': f'{harbor_project_path}/helm-chart-version',
+                        'action': 'read',
+                    },
+                ],
+            }
+        ),
+    )
     account=account.json()
     print(account)
 
@@ -61,7 +73,9 @@ def customer_email_to_harbor_username(email):
 
 def create_harbor_user_from_customer(customer_email,strip_id,customer_name):
     if not customer_email:
-        raise ValueError("Couldn't create a harbor user for customer %s - the record doesn't have the email set" % (strip_id))
+        raise ValueError(
+            f"Couldn't create a harbor user for customer {strip_id} - the record doesn't have the email set"
+        )
     return create_robot_account_for_project(customer_email_to_harbor_username(customer_email),customer_email,customer_name)
 
 def provision_harbor_permissions_for_customer(customer):
diff --git a/strip-harbor/sync/views.py b/strip-harbor/sync/views.py
index 0438d0a..cae782e 100644
--- a/strip-harbor/sync/views.py
+++ b/strip-harbor/sync/views.py
@@ -47,6 +47,6 @@ def webhook_handler(request):
 
             handle_deleted_subscription(deleted_subscription)
         else:
-            print('Unhandled event type {}'.format(event.type))
+            print(f'Unhandled event type {event.type}')
 
         return HttpResponse(status=200)
\ No newline at end of file
diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/build_env.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/build_env.py
index f981fab..d199c06 100644
--- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/build_env.py
+++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/build_env.py
@@ -40,10 +40,7 @@ def __init__(self, path):
         # library paths so PyPy is correctly supported.
         purelib = get_python_lib(plat_specific=False, prefix=path)
         platlib = get_python_lib(plat_specific=True, prefix=path)
-        if purelib == platlib:
-            self.lib_dirs = [purelib]
-        else:
-            self.lib_dirs = [purelib, platlib]
+        self.lib_dirs = [purelib] if purelib == platlib else [purelib, platlib]
 
 
 class BuildEnvironment:
@@ -115,8 +112,7 @@ def __enter__(self):
         }
 
         path = self._bin_dirs[:]
-        old_path = self._save_env['PATH']
-        if old_path:
+        if old_path := self._save_env['PATH']:
             path.extend(old_path.split(os.pathsep))
 
         pythonpath = [self._site_dir]
@@ -184,8 +180,7 @@ def install_requirements(
             args.extend(('--' + format_control.replace('_', '-'),
                          ','.join(sorted(formats or {':none:'}))))
 
-        index_urls = finder.index_urls
-        if index_urls:
+        if index_urls := finder.index_urls:
             args.extend(['-i', index_urls[0]])
             for extra_index in index_urls[1:]:
                 args.extend(['--extra-index-url', extra_index])
diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cache.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cache.py
index e41ea42..58885a8 100644
--- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cache.py
+++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cache.py
@@ -83,12 +83,7 @@ def _get_cache_path_parts(self, link):
         # difference for our use case here.
         hashed = _hash_dict(key_parts)
 
-        # We want to nest the directories some to prevent having a ton of top
-        # level directories where we might run out of sub directories on some
-        # FS.
-        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
-
-        return parts
+        return [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
 
     def _get_candidates(self, link, canonical_package_name):
         # type: (Link, str) -> List[Any]
@@ -109,8 +104,7 @@ def _get_candidates(self, link, canonical_package_name):
         candidates = []
         path = self.get_path_for_link(link)
         if os.path.isdir(path):
-            for candidate in os.listdir(path):
-                candidates.append((candidate, path))
+            candidates.extend((candidate, path) for candidate in os.listdir(path))
         return candidates
 
     def get_path_for_link(self, link):
@@ -259,9 +253,7 @@ def get(
     ):
         # type: (...)
-> Link cache_entry = self.get_cache_entry(link, package_name, supported_tags) - if cache_entry is None: - return link - return cache_entry.link + return link if cache_entry is None else cache_entry.link def get_cache_entry( self, @@ -287,7 +279,4 @@ def get_cache_entry( package_name=package_name, supported_tags=supported_tags, ) - if retval is not link: - return CacheEntry(retval, persistent=False) - - return None + return CacheEntry(retval, persistent=False) if retval is not link else None diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/autocompletion.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/autocompletion.py index 4c51dad..9359d2a 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/autocompletion.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/autocompletion.py @@ -33,12 +33,7 @@ def autocomplete(): subcommands = list(commands_dict) options = [] - # subcommand - subcommand_name = None # type: Optional[str] - for word in cwords: - if word in subcommands: - subcommand_name = word - break + subcommand_name = next((word for word in cwords if word in subcommands), None) # subcommand options if subcommand_name is not None: # special case: 'help' subcommand has no options @@ -50,13 +45,12 @@ def autocomplete(): not current.startswith('-') ) if should_list_installed: - installed = [] lc = current.lower() - for dist in get_installed_distributions(local_only=True): - if dist.key.startswith(lc) and dist.key not in cwords[1:]: - installed.append(dist.key) - # if there are no dists installed, fall back to option completion - if installed: + if installed := [ + dist.key + for dist in get_installed_distributions(local_only=True) + if dist.key.startswith(lc) and dist.key not in cwords[1:] + ]: for dist in installed: print(dist) sys.exit(1) @@ -65,27 +59,25 @@ def autocomplete(): for opt in subcommand.parser.option_list_all: if opt.help != optparse.SUPPRESS_HELP: - for opt_str in opt._long_opts + opt._short_opts: - options.append((opt_str, opt.nargs)) - + options.extend( + (opt_str, opt.nargs) + for opt_str in opt._long_opts + opt._short_opts + ) # filter out previously specified options from available options prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]] options = [(x, v) for (x, v) in options if x not in prev_opts] # filter options by current input options = [(k, v) for k, v in options if k.startswith(current)] - # get completion type given cwords and available subcommand options - completion_type = get_path_completion_type( - cwords, cword, subcommand.parser.option_list_all, - ) - # get completion files and directories if ``completion_type`` is - # ````, ```` or ```` - if completion_type: + if completion_type := get_path_completion_type( + cwords, + cword, + subcommand.parser.option_list_all, + ): paths = auto_complete_paths(current, completion_type) options = [(path, 0) for path in paths] for option in options: opt_label = option[0] - # append '=' to options which require args - if option[1] and option[0][:2] == "--": + if option[1] and opt_label[:2] == "--": opt_label += '=' print(opt_label) else: @@ -98,13 +90,11 @@ def autocomplete(): for opt in flattened_opts: if opt.help != optparse.SUPPRESS_HELP: subcommands += opt._long_opts + opt._short_opts - else: - # get completion type given cwords and all available options - completion_type = get_path_completion_type(cwords, cword, - flattened_opts) - if completion_type: - subcommands = 
list(auto_complete_paths(current, - completion_type)) + elif completion_type := get_path_completion_type( + cwords, cword, flattened_opts + ): + subcommands = list(auto_complete_paths(current, + completion_type)) print(' '.join([x for x in subcommands if x.startswith(current)])) sys.exit(1) @@ -125,11 +115,14 @@ def get_path_completion_type(cwords, cword, opts): if opt.help == optparse.SUPPRESS_HELP: continue for o in str(opt).split('/'): - if cwords[cword - 2].split('=')[0] == o: - if not opt.metavar or any( - x in ('path', 'file', 'dir') - for x in opt.metavar.split('/')): - return opt.metavar + if cwords[cword - 2].split('=')[0] == o and ( + not opt.metavar + or any( + x in ('path', 'file', 'dir') + for x in opt.metavar.split('/') + ) + ): + return opt.metavar return None diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/base_command.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/base_command.py index d6645fc..d122df8 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/base_command.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/base_command.py @@ -143,13 +143,15 @@ def _main(self, args): if options.exists_action: os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action) - if options.require_venv and not self.ignore_require_venv: - # If a venv is required check if it can really be found - if not running_under_virtualenv(): - logger.critical( - 'Could not find an activated virtualenv (required).' - ) - sys.exit(VIRTUALENV_NOT_FOUND) + if ( + options.require_venv + and not self.ignore_require_venv + and not running_under_virtualenv() + ): + logger.critical( + 'Could not find an activated virtualenv (required).' + ) + sys.exit(VIRTUALENV_NOT_FOUND) if options.cache_dir: options.cache_dir = normalize_path(options.cache_dir) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/cmdoptions.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/cmdoptions.py index 16fe14b..dae13b3 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/cmdoptions.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/cmdoptions.py @@ -120,12 +120,11 @@ def check_dist_restriction(options, check_target=False): ":none:)." 
) - if check_target: - if dist_restriction_set and not options.target_dir: - raise CommandError( - "Can not use any platform or abi specific options unless " - "installing via '--target'" - ) + if check_target and dist_restriction_set and not options.target_dir: + raise CommandError( + "Can not use any platform or abi specific options unless " + "installing via '--target'" + ) def _path_option_check(option, opt, value): @@ -614,16 +613,13 @@ def add_target_python_options(cmd_opts): def make_target_python(options): - # type: (Values) -> TargetPython - target_python = TargetPython( + return TargetPython( platforms=options.platforms, py_version_info=options.python_version, abis=options.abis, implementation=options.implementation, ) - return target_python - def prefer_binary(): # type: () -> Option @@ -826,12 +822,13 @@ def _handle_merge_hash(option, opt_str, value, parser): try: algo, digest = value.split(':', 1) except ValueError: - parser.error('Arguments to {} must be a hash name ' # noqa - 'followed by a value, like --hash=sha256:' - 'abcde...'.format(opt_str)) + parser.error( + f'Arguments to {opt_str} must be a hash name followed by a value, like --hash=sha256:abcde...' + ) if algo not in STRONG_HASHES: - parser.error('Allowed hash algorithms for {} are {}.'.format( # noqa - opt_str, ', '.join(STRONG_HASHES))) + parser.error( + f"Allowed hash algorithms for {opt_str} are {', '.join(STRONG_HASHES)}." + ) parser.values.hashes.setdefault(algo, []).append(digest) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/parser.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/parser.py index 79e56e8..5048cb2 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/parser.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/parser.py @@ -55,43 +55,32 @@ def _format_option_strings(self, option, mvarfmt=' <{}>', optsep=', '): return ''.join(opts) def format_heading(self, heading): - if heading == 'Options': - return '' - return heading + ':\n' + return '' if heading == 'Options' else heading + ':\n' def format_usage(self, usage): """ Ensure there is only one newline between usage and the first heading if there is no description. 
""" - msg = '\nUsage: {}\n'.format( - self.indent_lines(textwrap.dedent(usage), " ")) - return msg + return f'\nUsage: {self.indent_lines(textwrap.dedent(usage), " ")}\n' def format_description(self, description): # leave full control over description to us - if description: - if hasattr(self.parser, 'main'): - label = 'Commands' - else: - label = 'Description' - # some doc strings have initial newlines, some don't - description = description.lstrip('\n') - # some doc strings have final newlines and spaces, some don't - description = description.rstrip() - # dedent, then reindent - description = self.indent_lines(textwrap.dedent(description), " ") - description = f'{label}:\n{description}\n' - return description - else: + if not description: return '' + label = 'Commands' if hasattr(self.parser, 'main') else 'Description' + # some doc strings have initial newlines, some don't + description = description.lstrip('\n') + # some doc strings have final newlines and spaces, some don't + description = description.rstrip() + # dedent, then reindent + description = self.indent_lines(textwrap.dedent(description), " ") + description = f'{label}:\n{description}\n' + return description def format_epilog(self, epilog): # leave full control over epilog to us - if epilog: - return epilog - else: - return '' + return epilog or '' def indent_lines(self, text, indent): new_lines = [indent + line for line in text.split('\n')] @@ -191,8 +180,7 @@ def _get_ordered_configuration_items(self): # Yield each group in their override order for section in override_order: - for key, val in section_items[section]: - yield key, val + yield from section_items[section] def _update_defaults(self, defaults): """Updates the given defaults with values from the config files and @@ -205,7 +193,7 @@ def _update_defaults(self, defaults): # Then set the options with those values for key, val in self._get_ordered_configuration_items(): # '--' because configuration supports only long names - option = self.get_option('--' + key) + option = self.get_option(f'--{key}') # Ignore options not present in this parser. E.g. non-globals put # in [global] by users that want them to apply to all applicable @@ -218,9 +206,7 @@ def _update_defaults(self, defaults): val = strtobool(val) except ValueError: self.error( - '{} is not a valid value for {} option, ' # noqa - 'please specify a boolean value like yes/no, ' - 'true/false or 1/0 instead.'.format(val, key) + f'{val} is not a valid value for {key} option, please specify a boolean value like yes/no, true/false or 1/0 instead.' ) elif option.action == 'count': with suppress(ValueError): @@ -229,10 +215,7 @@ def _update_defaults(self, defaults): val = int(val) if not isinstance(val, int) or val < 0: self.error( - '{} is not a valid value for {} option, ' # noqa - 'please instead specify either a non-negative integer ' - 'or a boolean value like yes/no or false/true ' - 'which is equivalent to 1/0.'.format(val, key) + f'{val} is not a valid value for {key} option, please instead specify either a non-negative integer or a boolean value like yes/no or false/true which is equivalent to 1/0.' 
) elif option.action == 'append': val = val.split() diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/progress_bars.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/progress_bars.py index ac60d59..a7e1a17 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/progress_bars.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/progress_bars.py @@ -144,16 +144,12 @@ def downloaded(self): def download_speed(self): # type: () -> str # Avoid zero division errors... - if self.avg == 0.0: # type: ignore - return "..." - return format_size(1 / self.avg) + "/s" # type: ignore + return "..." if self.avg == 0.0 else f"{format_size(1 / self.avg)}/s" @property def pretty_eta(self): # type: () -> str - if self.eta: # type: ignore - return f"eta {self.eta_td}" # type: ignore - return "" + return f"eta {self.eta_td}" if self.eta else "" def iter(self, it): # type: ignore for x in it: diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/req_command.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/req_command.py index b7c5c4f..fc24e49 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/req_command.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/cli/req_command.py @@ -60,11 +60,9 @@ def _get_index_urls(cls, options): """Return a list of index urls from user-provided options.""" index_urls = [] if not getattr(options, "no_index", False): - url = getattr(options, "index_url", None) - if url: + if url := getattr(options, "index_url", None): index_urls.append(url) - urls = getattr(options, "extra_index_urls", None) - if urls: + if urls := getattr(options, "extra_index_urls", None): index_urls.extend(urls) # Return None rather than an empty list return index_urls or None @@ -392,8 +390,7 @@ def trace_basic_info(finder): """ # Display where finder is looking for packages search_scope = finder.search_scope - locations = search_scope.get_formatted_locations() - if locations: + if locations := search_scope.get_formatted_locations(): logger.info(locations) def _build_package_finder( diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/__init__.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/__init__.py index 315b5dd..4fa3383 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/__init__.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/__init__.py @@ -95,9 +95,7 @@ def create_command(name, **kwargs): module_path, class_name, summary = commands_dict[name] module = importlib.import_module(module_path) command_class = getattr(module, class_name) - command = command_class(name=name, summary=summary, **kwargs) - - return command + return command_class(name=name, summary=summary, **kwargs) def get_similar_commands(name): @@ -107,9 +105,7 @@ def get_similar_commands(name): name = name.lower() - close_commands = get_close_matches(name, commands_dict.keys()) - - if close_commands: + if close_commands := get_close_matches(name, commands_dict.keys()): return close_commands[0] else: return None diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/cache.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/cache.py index 
d2f7ae0..bd13cdf 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/cache.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/cache.py @@ -133,11 +133,7 @@ def list_cache_items(self, options, args): if len(args) > 1: raise CommandError('Too many arguments') - if args: - pattern = args[0] - else: - pattern = '*' - + pattern = args[0] if args else '*' files = self._find_wheels(options, pattern) if options.list_format == 'human': self.format_for_human(files) @@ -163,10 +159,7 @@ def format_for_abspath(self, files): if not files: return - results = [] - for filename in files: - results.append(filename) - + results = list(files) logger.info('\n'.join(sorted(results))) def remove_cache_items(self, options, args): diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/check.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/check.py index d938da5..4799b85 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/check.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/check.py @@ -46,6 +46,5 @@ def run(self, options, args): if missing or conflicting or parsing_probs: return ERROR - else: - write_output("No broken requirements found.") - return SUCCESS + write_output("No broken requirements found.") + return SUCCESS diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/completion.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/completion.py index 7b690fa..a993569 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/completion.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/completion.py @@ -81,16 +81,14 @@ def run(self, options, args): # type: (Values, List[str]) -> int """Prints the completion code of the given shell""" shells = COMPLETION_SCRIPTS.keys() - shell_options = ['--' + shell for shell in sorted(shells)] + shell_options = [f'--{shell}' for shell in sorted(shells)] if options.shell in shells: script = textwrap.dedent( COMPLETION_SCRIPTS.get(options.shell, '').format( prog=get_prog()) ) print(BASE_COMPLETION.format(script=script, shell=options.shell)) - return SUCCESS else: - sys.stderr.write( - 'ERROR: You must pass {}\n' .format(' or '.join(shell_options)) - ) - return SUCCESS + sys.stderr.write(f"ERROR: You must pass {' or '.join(shell_options)}\n") + + return SUCCESS diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/configuration.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/configuration.py index ad59f02..9f2a240 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/configuration.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/configuration.py @@ -235,26 +235,17 @@ def open_in_editor(self, options, args): try: subprocess.check_call([editor, fname]) except subprocess.CalledProcessError as e: - raise PipError( - "Editor Subprocess exited with exit code {}" - .format(e.returncode) - ) + raise PipError(f"Editor Subprocess exited with exit code {e.returncode}") def _get_n_args(self, args, example, n): # type: (List[str], str, int) -> Any """Helper to make sure the command got the right number of arguments """ if len(args) != n: - msg = ( - 
'Got unexpected number of arguments, expected {}. ' - '(example: "{} config {}")' - ).format(n, get_prog(), example) + msg = f'Got unexpected number of arguments, expected {n}. (example: "{get_prog()} config {example}")' raise PipError(msg) - if n == 1: - return args[0] - else: - return args + return args[0] if n == 1 else args def _save_configuration(self): # type: () -> None diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/debug.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/debug.py index 893383a..59482d5 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/debug.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/debug.py @@ -101,8 +101,7 @@ def show_actual_vendor_versions(vendor_txt_versions): ' vendor.txt specified version)' actual_version = expected_version elif actual_version != expected_version: - extra_message = ' (CONFLICT: vendor.txt suggests version should'\ - ' be {})'.format(expected_version) + extra_message = f' (CONFLICT: vendor.txt suggests version should be {expected_version})' logger.info('%s==%s%s', module_name, actual_version, extra_message) @@ -122,13 +121,11 @@ def show_tags(options): target_python = make_target_python(options) tags = target_python.get_tags() - # Display the target options that were explicitly provided. - formatted_target = target_python.format_given() - suffix = '' - if formatted_target: + if formatted_target := target_python.format_given(): suffix = f' (target: {formatted_target})' - - msg = 'Compatible tags: {}{}'.format(len(tags), suffix) + else: + suffix = '' + msg = f'Compatible tags: {len(tags)}{suffix}' logger.info(msg) if options.verbose < 1 and len(tags) > tag_limit: @@ -151,10 +148,7 @@ def show_tags(options): def ca_bundle_info(config): # type: (Configuration) -> str - levels = set() - for key, _ in config.items(): - levels.add(key.split('.')[0]) - + levels = {key.split('.')[0] for key, _ in config.items()} if not levels: return "Not specified" diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/freeze.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/freeze.py index 6eb1db2..74b251d 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/freeze.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/freeze.py @@ -63,8 +63,8 @@ def add_options(self): '--all', dest='freeze_all', action='store_true', - help='Do not skip these packages in the output:' - ' {}'.format(', '.join(DEV_PKGS))) + help=f"Do not skip these packages in the output: {', '.join(DEV_PKGS)}", + ) self.cmd_opts.add_option( '--exclude-editable', dest='exclude_editable', diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/hash.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/hash.py index 891e393..a9108ea 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/hash.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/hash.py @@ -29,13 +29,14 @@ class HashCommand(Command): def add_options(self): # type: () -> None self.cmd_opts.add_option( - '-a', '--algorithm', + '-a', + '--algorithm', dest='algorithm', choices=STRONG_HASHES, action='store', default=FAVORITE_HASH, - help='The hash algorithm to use: one of {}'.format( 
- ', '.join(STRONG_HASHES))) + help=f"The hash algorithm to use: one of {', '.join(STRONG_HASHES)}", + ) self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/install.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/install.py index 7410e50..90ba326 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/install.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/install.py @@ -348,18 +348,11 @@ def run(self, options, args): global_options=[], ) - # If we're using PEP 517, we cannot do a direct install - # so we fail here. - pep517_build_failure_names = [ - r.name # type: ignore - for r in build_failures if r.use_pep517 - ] # type: List[str] - if pep517_build_failure_names: + if pep517_build_failure_names := [ + r.name for r in build_failures if r.use_pep517 # type: ignore + ]: raise InstallationError( - "Could not build wheels for {} which use" - " PEP 517 and cannot be installed directly".format( - ", ".join(pep517_build_failure_names) - ) + f'Could not build wheels for {", ".join(pep517_build_failure_names)} which use PEP 517 and cannot be installed directly' ) # For now, we just warn about failures building legacy @@ -414,11 +407,10 @@ def run(self, options, args): for result in installed: item = result.name try: - installed_version = get_installed_version( + if installed_version := get_installed_version( result.name, working_set=working_set - ) - if installed_version: - item += '-' + installed_version + ): + item += f'-{installed_version}' except Exception: pass items.append(item) @@ -429,8 +421,7 @@ def run(self, options, args): resolver_variant=self.determine_resolver_variant(options), ) - installed_desc = ' '.join(items) - if installed_desc: + if installed_desc := ' '.join(items): write_output( 'Successfully installed %s', installed_desc, ) @@ -702,13 +693,10 @@ def create_os_error_message(error, show_traceback, using_user_site): It may occur anytime during the execution of the install command. 
""" - parts = [] + parts = ["Could not install packages due to an OSError"] - # Mention the error if we are not going to show a traceback - parts.append("Could not install packages due to an OSError") if not show_traceback: - parts.append(": ") - parts.append(str(error)) + parts.extend((": ", str(error))) else: parts.append(".") @@ -718,10 +706,10 @@ def create_os_error_message(error, show_traceback, using_user_site): # Suggest useful actions to the user: # (1) using user site-packages or (2) verifying the permissions if error.errno == errno.EACCES: - user_option_part = "Consider using the `--user` option" permissions_part = "Check the permissions" if not using_user_site: + user_option_part = "Consider using the `--user` option" parts.extend([ user_option_part, " or ", permissions_part.lower(), diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/list.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/list.py index f54c67b..d571911 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/list.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/list.py @@ -219,10 +219,7 @@ def latest_info(dist): return None remote_version = best_candidate.version - if best_candidate.link.is_wheel: - typ = 'wheel' - else: - typ = 'sdist' + typ = 'wheel' if best_candidate.link.is_wheel else 'sdist' # This is dirty but makes the rest of the code much cleaner dist.latest_version = remote_version dist.latest_filetype = typ @@ -292,9 +289,7 @@ def format_for_columns(pkgs, options): row = [proj.project_name, proj.version] if running_outdated: - row.append(proj.latest_version) - row.append(proj.latest_filetype) - + row.extend((proj.latest_version, proj.latest_filetype)) if options.verbose >= 1 or dist_is_editable(proj): row.append(proj.location) if options.verbose >= 1: diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/search.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/search.py index 730aba8..b213d9f 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/search.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/search.py @@ -60,14 +60,9 @@ def run(self, options, args): pypi_hits = self.search(query, options) hits = transform_hits(pypi_hits) - terminal_width = None - if sys.stdout.isatty(): - terminal_width = shutil.get_terminal_size()[0] - + terminal_width = shutil.get_terminal_size()[0] if sys.stdout.isatty() else None print_results(hits, terminal_width=terminal_width) - if pypi_hits: - return SUCCESS - return NO_MATCHES_FOUND + return SUCCESS if pypi_hits else NO_MATCHES_FOUND def search(self, query, options): # type: (List[str], Values) -> List[Dict[str, str]] @@ -122,10 +117,14 @@ def print_results(hits, name_column_width=None, terminal_width=None): if not hits: return if name_column_width is None: - name_column_width = max([ - len(hit['name']) + len(highest_version(hit.get('versions', ['-']))) - for hit in hits - ]) + 4 + name_column_width = ( + max( + len(hit['name']) + + len(highest_version(hit.get('versions', ['-']))) + for hit in hits + ) + + 4 + ) installed_packages = [p.project_name for p in pkg_resources.working_set] for hit in hits: diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/show.py 
b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/show.py index 71dfa96..81576ae 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/show.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/show.py @@ -47,10 +47,13 @@ def run(self, options, args): query = args results = search_packages_info(query) - if not print_results( - results, list_files=options.files, verbose=options.verbose): - return ERROR - return SUCCESS + return ( + SUCCESS + if print_results( + results, list_files=options.files, verbose=options.verbose + ) + else ERROR + ) def search_packages_info(query): diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/uninstall.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/uninstall.py index 5a91b7a..90c02ab 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/uninstall.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/commands/uninstall.py @@ -84,10 +84,10 @@ def run(self, options, args): ) for req in reqs_to_uninstall.values(): - uninstall_pathset = req.uninstall( - auto_confirm=options.yes, verbose=self.verbosity > 0, - ) - if uninstall_pathset: + if uninstall_pathset := req.uninstall( + auto_confirm=options.yes, + verbose=self.verbosity > 0, + ): uninstall_pathset.commit() return SUCCESS diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/configuration.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/configuration.py index 0a96b3a..5498c9a 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/configuration.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/configuration.py @@ -63,10 +63,7 @@ def _normalize_name(name): def _disassemble_key(name): # type: (str) -> List[str] if "." not in name: - error_message = ( - "Key does not contain dot separated section and key. " - "Perhaps you wanted to use 'global.{}' instead?" - ).format(name) + error_message = f"Key does not contain dot separated section and key. Perhaps you wanted to use 'global.{name}' instead?" 
raise ConfigurationError(error_message) return name.split(".", 1) @@ -114,9 +111,7 @@ def __init__(self, isolated, load_only=None): if load_only is not None and load_only not in VALID_LOAD_ONLY: raise ConfigurationError( - "Got invalid value for load_only - should be one of {}".format( - ", ".join(map(repr, VALID_LOAD_ONLY)) - ) + f'Got invalid value for load_only - should be one of {", ".join(map(repr, VALID_LOAD_ONLY))}' ) self.isolated = isolated self.load_only = load_only @@ -248,7 +243,7 @@ def _dictionary(self): retval = {} for variant in OVERRIDE_ORDER: - retval.update(self._config[variant]) + retval |= self._config[variant] return retval @@ -257,7 +252,7 @@ def _load_config_files(self): """Loads configuration from configuration files """ config_files = dict(self.iter_config_files()) - if config_files[kinds.ENV][0:1] == [os.devnull]: + if config_files[kinds.ENV][:1] == [os.devnull]: logger.debug( "Skipping loading configuration files due to " "environment's PIP_CONFIG_FILE being os.devnull" @@ -303,9 +298,7 @@ def _construct_parser(self, fname): except UnicodeDecodeError: # See https://github.com/pypa/pip/issues/4963 raise ConfigurationFileCouldNotBeLoaded( - reason="contains invalid {} characters".format( - locale.getpreferredencoding(False) - ), + reason=f"contains invalid {locale.getpreferredencoding(False)} characters", fname=fname, ) except configparser.Error as error: @@ -330,7 +323,7 @@ def _normalized_keys(self, section, items): """ normalized = {} for name, val in items: - key = section + "." + _normalize_name(name) + key = f"{section}.{_normalize_name(name)}" normalized[key] = val return normalized @@ -384,16 +377,15 @@ def _get_parser_to_modify(self): # type: () -> Tuple[str, RawConfigParser] # Determine which parser to modify assert self.load_only - parsers = self._parsers[self.load_only] - if not parsers: + if parsers := self._parsers[self.load_only]: + # Use the highest priority parser. + return parsers[-1] + else: # This should not happen if everything works correctly. raise ConfigurationError( "Fatal Internal error [id=2]. Please report as a bug." ) - # Use the highest priority parser. - return parsers[-1] - # XXX: This is patched in the tests. 
def _mark_as_modified(self, fname, parser): # type: (str, RawConfigParser) -> None diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/distributions/sdist.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/distributions/sdist.py index dceba50..b2eff8f 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/distributions/sdist.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/distributions/sdist.py @@ -33,9 +33,7 @@ def prepare_distribution_metadata(self, finder, build_isolation): # Load pyproject.toml, to determine whether PEP 517 is to be used self.req.load_pyproject_toml() - # Set up the build isolation, if this requirement should be isolated - should_isolate = self.req.use_pep517 and build_isolation - if should_isolate: + if should_isolate := self.req.use_pep517 and build_isolation: self._setup_isolation(finder) self.req.prepare_metadata() diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/exceptions.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/exceptions.py index a9a0b5f..94affa5 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/exceptions.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/exceptions.py @@ -54,11 +54,7 @@ def __str__(self): # type: () -> str # Use `dist` in the error message because its stringification # includes more information, like the version and location. - return ( - 'None {} metadata found for distribution: {}'.format( - self.metadata_name, self.dist, - ) - ) + return f'None {self.metadata_name} metadata found for distribution: {self.dist}' class DistributionNotFound(InstallationError): @@ -148,10 +144,7 @@ def __init__(self, returncode, description): def __str__(self): # type: () -> str - return ( - "Command errored out with exit status {}: {} " - "Check the logs for full command output." - ).format(self.returncode, self.description) + return f"Command errored out with exit status {self.returncode}: {self.description} Check the logs for full command output." class HashErrors(InstallationError): @@ -172,9 +165,7 @@ def __str__(self): for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__): lines.append(cls.head) lines.extend(e.body() for e in errors_of_cls) - if lines: - return '\n'.join(lines) - return '' + return '\n'.join(lines) if lines else '' def __nonzero__(self): # type: () -> bool @@ -281,13 +272,8 @@ def body(self): # In the case of URL-based requirements, display the original URL # seen in the requirements file rather than the package name, # so the output can be directly copied into the requirements file. - package = (self.req.original_link if self.req.original_link - # In case someone feeds something downright stupid - # to InstallRequirement's constructor. 
- else getattr(self.req, 'req', None)) - return ' {} --hash={}:{}'.format(package or 'unknown package', - FAVORITE_HASH, - self.gotten_hash) + package = self.req.original_link or getattr(self.req, 'req', None) + return f" {package or 'unknown package'} --hash={FAVORITE_HASH}:{self.gotten_hash}" class HashUnpinned(HashError): @@ -327,8 +313,7 @@ def __init__(self, allowed, gots): def body(self): # type: () -> str - return ' {}:\n{}'.format(self._requirement_name(), - self._hash_comparison()) + return f' {self._requirement_name()}:\n{self._hash_comparison()}' def _hash_comparison(self): # type: () -> str diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/index/collector.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/index/collector.py index 83917a2..2d3e317 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/index/collector.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/index/collector.py @@ -58,10 +58,14 @@ def _match_vcs_scheme(url): Returns the matched VCS scheme, or None if there's no match. """ - for scheme in vcs.schemes: - if url.lower().startswith(scheme) and url[len(scheme)] in '+:': - return scheme - return None + return next( + ( + scheme + for scheme in vcs.schemes + if url.lower().startswith(scheme) and url[len(scheme)] in '+:' + ), + None, + ) class _NotHTML(Exception): @@ -218,21 +222,14 @@ def _clean_url_path(path, is_local_path): """ Clean the path portion of a URL. """ - if is_local_path: - clean_func = _clean_file_url_path - else: - clean_func = _clean_url_path_part - + clean_func = _clean_file_url_path if is_local_path else _clean_url_path_part # Split on the reserved characters prior to cleaning so that # revision strings in VCS URLs are properly preserved. parts = _reserved_chars_re.split(path) cleaned_parts = [] for to_clean, reserved in pairwise(itertools.chain(parts, [''])): - cleaned_parts.append(clean_func(to_clean)) - # Normalize %xx escapes (e.g. %2f -> %2F) - cleaned_parts.append(reserved.upper()) - + cleaned_parts.extend((clean_func(to_clean), reserved.upper())) return ''.join(cleaned_parts) @@ -274,15 +271,13 @@ def _create_link_from_element( # This is a unicode string in Python 2 (and 3). yanked_reason = unescape(yanked_reason) - link = Link( + return Link( url, comes_from=page_url, requires_python=pyrequire, yanked_reason=yanked_reason, ) - return link - class CacheablePageContent: def __init__(self, page): @@ -408,9 +403,7 @@ def _get_html_page(link, session=None): url = link.url.split('#', 1)[0] - # Check for VCS schemes that do not support lookup as web pages. 
- vcs_scheme = _match_vcs_scheme(url) - if vcs_scheme: + if vcs_scheme := _match_vcs_scheme(url): logger.warning('Cannot look at %s URL %s because it does not support ' 'lookup as web pages.', vcs_scheme, link) return None @@ -443,8 +436,7 @@ def _get_html_page(link, session=None): except RetryError as exc: _handle_get_page_fail(link, exc) except SSLError as exc: - reason = "There was a problem confirming the ssl certificate: " - reason += str(exc) + reason = f"There was a problem confirming the ssl certificate: {str(exc)}" _handle_get_page_fail(link, reason, meth=logger.info) except requests.ConnectionError as exc: _handle_get_page_fail(link, f"connection error: {exc}") @@ -490,10 +482,7 @@ def sort_path(path): is_file_url = url.startswith('file:') if is_local_path or is_file_url: - if is_local_path: - path = url - else: - path = url_to_path(url) + path = url if is_local_path else url_to_path(url) if os.path.isdir(path): if expand_dir: path = os.path.realpath(path) @@ -599,10 +588,10 @@ def create(cls, session, options, suppress_no_index=False): search_scope = SearchScope.create( find_links=find_links, index_urls=index_urls, ) - link_collector = LinkCollector( - session=session, search_scope=search_scope, + return LinkCollector( + session=session, + search_scope=search_scope, ) - return link_collector @property def find_links(self): @@ -651,12 +640,9 @@ def collect_links(self, project_name): url_locations = _remove_duplicate_links(url_locations) lines = [ - '{} location(s) to search for versions of {}:'.format( - len(url_locations), project_name, - ), + f'{len(url_locations)} location(s) to search for versions of {project_name}:' ] - for link in url_locations: - lines.append(f'* {link}') + lines.extend(f'* {link}' for link in url_locations) logger.debug('\n'.join(lines)) return CollectedLinks( diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/index/package_finder.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/index/package_finder.py index 3e3b1a3..ca2c17a 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/index/package_finder.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/index/package_finder.py @@ -173,8 +173,7 @@ def evaluate_link(self, link): if ext not in SUPPORTED_EXTENSIONS: return (False, f'unsupported archive format: {ext}') if "binary" not in self._formats and ext == WHEEL_EXTENSION: - reason = 'No binaries permitted for {}'.format( - self.project_name) + reason = f'No binaries permitted for {self.project_name}' return (False, reason) if "macosx10" in link.path and ext == '.zip': return (False, 'macosx10 one') @@ -184,8 +183,7 @@ def evaluate_link(self, link): except InvalidWheelFilename: return (False, 'invalid wheel filename') if canonicalize_name(wheel.name) != self._canonical_name: - reason = 'wrong project name (not {})'.format( - self.project_name) + reason = f'wrong project name (not {self.project_name})' return (False, reason) supported_tags = self._target_python.get_tags() @@ -193,11 +191,7 @@ def evaluate_link(self, link): # Include the wheel's tags in the reason string to # simplify troubleshooting compatibility issues. 
file_tags = wheel.get_formatted_file_tags() - reason = ( - "none of the wheel's tags match: {}".format( - ', '.join(file_tags) - ) - ) + reason = f"none of the wheel's tags match: {', '.join(file_tags)}" return (False, reason) version = wheel.version @@ -215,8 +209,7 @@ def evaluate_link(self, link): reason = f'Missing project version for {self.project_name}' return (False, reason) - match = self._py_version_re.search(version) - if match: + if match := self._py_version_re.search(version): version = version[:match.start()] py_version = match.group(1) if py_version != self._target_python.py_version: @@ -283,12 +276,7 @@ def filter_unallowed_hashes( matches_or_no_digest.append(candidate) - if match_count: - filtered = matches_or_no_digest - else: - # Make sure we're not returning back the given value. - filtered = list(candidates) - + filtered = matches_or_no_digest if match_count else list(candidates) if len(filtered) == len(candidates): discard_message = 'discarding no candidates' else: @@ -511,7 +499,6 @@ def _sort_key(self, candidate): with the same version, would have to be considered equal """ valid_tags = self._supported_tags - support_num = len(valid_tags) build_tag = () # type: BuildTag binary_preference = 0 link = candidate.link @@ -520,8 +507,7 @@ def _sort_key(self, candidate): wheel = Wheel(link.filename) if not wheel.supported(valid_tags): raise UnsupportedWheel( - "{} is not a supported wheel for this platform. It " - "can't be sorted.".format(wheel.filename) + f"{wheel.filename} is not a supported wheel for this platform. It can't be sorted." ) if self._prefer_binary: binary_preference = 1 @@ -531,6 +517,7 @@ def _sort_key(self, candidate): build_tag_groups = match.groups() build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) else: # sdist + support_num = len(valid_tags) pri = -(support_num) has_allowed_hash = int(link.is_hash_allowed(self._hashes)) yank_value = -1 * int(link.is_yanked) # -1 for yanked. @@ -548,10 +535,7 @@ def sort_best_candidate( Return the best candidate per the instance's sort order, or None if no candidate is acceptable. """ - if not candidates: - return None - best_candidate = max(candidates, key=self._sort_key) - return best_candidate + return max(candidates, key=self._sort_key) if candidates else None def compute_best_candidate( self, @@ -1000,6 +984,4 @@ def _extract_version_from_fragment(fragment, canonical_name): except ValueError: return None version = fragment[version_start:] - if not version: - return None - return version + return version or None diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/locations.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/locations.py index d30132d..4c54317 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/locations.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/locations.py @@ -114,17 +114,14 @@ def distutils_scheme( i.root = root or i.root i.finalize_options() - scheme = {} - for key in SCHEME_KEYS: - scheme[key] = getattr(i, 'install_' + key) - + scheme = {key: getattr(i, f'install_{key}') for key in SCHEME_KEYS} # install_lib specified in setup.cfg should install *everything* # into there (i.e. it takes precedence over both purelib and # platlib). 
Note, i.install_lib is *always* set after # finalize_options(); we only want to override here if the user # has explicitly requested it hence going back to the config if 'install_lib' in d.get_option_dict('install'): - scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) + scheme |= dict(purelib=i.install_lib, platlib=i.install_lib) if running_under_virtualenv(): scheme['headers'] = os.path.join( diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/models/direct_url.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/models/direct_url.py index bdc3ff3..60887ad 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/models/direct_url.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/models/direct_url.py @@ -124,9 +124,7 @@ def __init__( @classmethod def _from_dict(cls, d): # type: (Optional[Dict[str, Any]]) -> Optional[ArchiveInfo] - if d is None: - return None - return cls(hash=_get(d, str, "hash")) + return None if d is None else cls(hash=_get(d, str, "hash")) def _to_dict(self): # type: () -> Dict[str, Any] @@ -183,9 +181,7 @@ def _remove_auth_from_netloc(self, netloc): user_pass == "git" ): return netloc - if ENV_VAR_RE.match(user_pass): - return netloc - return netloc_no_user_pass + return netloc if ENV_VAR_RE.match(user_pass) else netloc_no_user_pass @property def redacted_url(self): @@ -196,10 +192,9 @@ def redacted_url(self): """ purl = urllib.parse.urlsplit(self.url) netloc = self._remove_auth_from_netloc(purl.netloc) - surl = urllib.parse.urlunsplit( + return urllib.parse.urlunsplit( (purl.scheme, netloc, purl.path, purl.query, purl.fragment) ) - return surl def validate(self): # type: () -> None diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/models/format_control.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/models/format_control.py index 1ab8a1f..6dfa4cd 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/models/format_control.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/models/format_control.py @@ -38,11 +38,7 @@ def __eq__(self, other): def __repr__(self): # type: () -> str - return "{}({}, {})".format( - self.__class__.__name__, - self.no_binary, - self.only_binary - ) + return f"{self.__class__.__name__}({self.no_binary}, {self.only_binary})" @staticmethod def handle_mutual_excludes(value, target, other): @@ -71,13 +67,13 @@ def handle_mutual_excludes(value, target, other): def get_allowed_formats(self, canonical_name): # type: (str) -> FrozenSet[str] result = {"binary", "source"} - if canonical_name in self.only_binary: - result.discard('source') - elif canonical_name in self.no_binary: - result.discard('binary') - elif ':all:' in self.only_binary: + if ( + canonical_name in self.only_binary + or canonical_name not in self.no_binary + and ':all:' in self.only_binary + ): result.discard('source') - elif ':all:' in self.no_binary: + elif canonical_name in self.no_binary or ':all:' in self.no_binary: result.discard('binary') return frozenset(result) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/models/link.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/models/link.py index bb847ab..10f8979 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/models/link.py +++ 
b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/models/link.py @@ -73,7 +73,7 @@ def __init__( self._url = url self.comes_from = comes_from - self.requires_python = requires_python if requires_python else None + self.requires_python = requires_python or None self.yanked_reason = yanked_reason super().__init__(key=url, defining_class=Link) @@ -81,16 +81,15 @@ def __init__( self.cache_link_parsing = cache_link_parsing def __str__(self): - # type: () -> str - if self.requires_python: - rp = f' (requires-python:{self.requires_python})' - else: - rp = '' - if self.comes_from: - return '{} (from {}){}'.format( - redact_auth_from_url(self._url), self.comes_from, rp) - else: + if not self.comes_from: return redact_auth_from_url(str(self._url)) + # type: () -> str + rp = ( + f' (requires-python:{self.requires_python})' + if self.requires_python + else '' + ) + return f'{redact_auth_from_url(self._url)} (from {self.comes_from}){rp}' def __repr__(self): # type: () -> str @@ -161,9 +160,7 @@ def url_without_fragment(self): def egg_fragment(self): # type: () -> Optional[str] match = self._egg_fragment_re.search(self._url) - if not match: - return None - return match.group(1) + return match.group(1) if match else None _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)') @@ -171,9 +168,7 @@ def egg_fragment(self): def subdirectory_fragment(self): # type: () -> Optional[str] match = self._subdirectory_fragment_re.search(self._url) - if not match: - return None - return match.group(1) + return match.group(1) if match else None _hash_re = re.compile( r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)' @@ -181,19 +176,11 @@ def subdirectory_fragment(self): @property def hash(self): - # type: () -> Optional[str] - match = self._hash_re.search(self._url) - if match: - return match.group(2) - return None + return match.group(2) if (match := self._hash_re.search(self._url)) else None @property def hash_name(self): - # type: () -> Optional[str] - match = self._hash_re.search(self._url) - if match: - return match.group(1) - return None + return match.group(1) if (match := self._hash_re.search(self._url)) else None @property def show_url(self): diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/models/search_scope.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/models/search_scope.py index 775db4d..e8fe591 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/models/search_scope.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/models/search_scope.py @@ -79,8 +79,8 @@ def __init__( def get_formatted_locations(self): # type: () -> str lines = [] - redacted_index_urls = [] if self.index_urls and self.index_urls != [PyPI.simple_url]: + redacted_index_urls = [] for url in self.index_urls: redacted_index_url = redact_auth_from_url(url) @@ -100,8 +100,7 @@ def get_formatted_locations(self): redacted_index_urls.append(redacted_index_url) - lines.append('Looking in indexes: {}'.format( - ', '.join(redacted_index_urls))) + lines.append(f"Looking in indexes: {', '.join(redacted_index_urls)}") if self.find_links: lines.append( @@ -129,7 +128,7 @@ def mkurl_pypi_url(url): # implementations might break if they relied on easy_install's # behavior. 
if not loc.endswith('/'): - loc = loc + '/' + loc = f'{loc}/' return loc return [mkurl_pypi_url(url) for url in self.index_urls] diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/network/auth.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/network/auth.py index 2cdf42e..1bc294a 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/network/auth.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/network/auth.py @@ -56,14 +56,10 @@ def get_keyring_auth(url, username): else: logger.debug("Getting credentials from keyring for %s", url) cred = get_credential(url, username) - if cred is not None: - return cred.username, cred.password - return None - + return (cred.username, cred.password) if cred is not None else None if username: logger.debug("Getting password from keyring for %s", url) - password = keyring.get_password(url, username) - if password: + if password := keyring.get_password(url, username): return username, password except Exception as exc: @@ -129,9 +125,7 @@ def _get_new_credentials(self, original_url, allow_netrc=True, # Find a matching index url for this request index_url = self._get_index_url(url) if index_url: - # Split the credentials from the url. - index_info = split_auth_netloc_from_url(index_url) - if index_info: + if index_info := split_auth_netloc_from_url(index_url): index_url, _, index_url_user_password = index_info logger.debug("Found index url %s", index_url) @@ -144,19 +138,16 @@ def _get_new_credentials(self, original_url, allow_netrc=True, # Get creds from netrc if we still don't have them if allow_netrc: - netrc_auth = get_netrc_auth(original_url) - if netrc_auth: + if netrc_auth := get_netrc_auth(original_url): logger.debug("Found credentials in netrc for %s", netloc) return netrc_auth # If we don't have a password and keyring is available, use it. 
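For context on the keyring branch that follows: the rewrite folds "try the most specific source first, then fall back" into a single walrus-bound `or` chain. A self-contained sketch of that shape, with a plain dict standing in for the real keyring backend (the names and URLs below are invented, not pip's helpers):

    _FAKE_KEYRING = {'https://pypi.example/simple/': ('alice', 's3cret')}

    def _lookup(url):
        # stand-in for get_keyring_auth(); returns (username, password) or None
        return _FAKE_KEYRING.get(url)

    def keyring_auth(index_url, netloc):
        # the more specific index URL wins; the bare netloc is the fallback
        if kr_auth := (_lookup(index_url) or _lookup(netloc)):
            return kr_auth
        return None

    assert keyring_auth('https://pypi.example/simple/', 'pypi.example') == ('alice', 's3cret')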
if allow_keyring: - # The index url is more specific than the netloc, so try it first - kr_auth = ( - get_keyring_auth(index_url, username) or - get_keyring_auth(netloc, username) - ) - if kr_auth: + if kr_auth := ( + get_keyring_auth(index_url, username) + or get_keyring_auth(netloc, username) + ): logger.debug("Found credentials in keyring for %s", netloc) return kr_auth @@ -235,9 +226,11 @@ def _prompt_for_password(self, netloc): # Factored out to allow for easy patching in tests def _should_save_password_to_keyring(self): # type: () -> bool - if not keyring: - return False - return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y" + return ( + ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y" + if keyring + else False + ) def handle_401(self, resp, **kwargs): # type: (Response, **Any) -> Response diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/network/download.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/network/download.py index a802bf2..ef0760e 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/network/download.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/network/download.py @@ -50,7 +50,7 @@ def _prepare_download( logged_url = redact_auth_from_url(url) if total_length: - logged_url = '{} ({})'.format(logged_url, format_size(total_length)) + logged_url = f'{logged_url} ({format_size(total_length)})' if is_from_cache(resp): logger.info("Using cached %s", logged_url) @@ -70,12 +70,11 @@ def _prepare_download( chunks = response_chunks(resp, CONTENT_CHUNK_SIZE) - if not show_progress: - return chunks - - return DownloadProgressProvider( - progress_bar, max=total_length - )(chunks) + return ( + DownloadProgressProvider(progress_bar, max=total_length)(chunks) + if show_progress + else chunks + ) def sanitize_content_filename(filename): @@ -107,9 +106,7 @@ def _get_http_response_filename(resp, link): the link filename if not provided. 
""" filename = link.filename # fallback - # Have a look at the Content-Disposition header for a better guess - content_disposition = resp.headers.get('content-disposition') - if content_disposition: + if content_disposition := resp.headers.get('content-disposition'): filename = parse_content_disposition(content_disposition, filename) ext = splitext(filename)[1] # type: Optional[str] if not ext: @@ -119,8 +116,7 @@ def _get_http_response_filename(resp, link): if ext: filename += ext if not ext and link.url != resp.url: - ext = os.path.splitext(resp.url)[1] - if ext: + if ext := os.path.splitext(resp.url)[1]: filename += ext return filename diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/network/session.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/network/session.py index 8b4b39d..0c9ef67 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/network/session.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/network/session.py @@ -108,7 +108,13 @@ def user_agent(): }, } - if data["implementation"]["name"] == 'CPython': + if ( + data["implementation"]["name"] == 'CPython' + or data["implementation"]["name"] != 'PyPy' + and data["implementation"]["name"] == 'Jython' + or data["implementation"]["name"] != 'PyPy' + and data["implementation"]["name"] == 'IronPython' + ): data["implementation"]["version"] = platform.python_version() elif data["implementation"]["name"] == 'PyPy': if sys.pypy_version_info.releaselevel == 'final': @@ -118,24 +124,18 @@ def user_agent(): data["implementation"]["version"] = ".".join( [str(x) for x in pypy_version_info] ) - elif data["implementation"]["name"] == 'Jython': - # Complete Guess - data["implementation"]["version"] = platform.python_version() - elif data["implementation"]["name"] == 'IronPython': - # Complete Guess - data["implementation"]["version"] = platform.python_version() - if sys.platform.startswith("linux"): from pip._vendor import distro distro_infos = dict(filter( lambda x: x[1], zip(["name", "version", "id"], distro.linux_distribution()), )) - libc = dict(filter( - lambda x: x[1], - zip(["lib", "version"], libc_ver()), - )) - if libc: + if libc := dict( + filter( + lambda x: x[1], + zip(["lib", "version"], libc_ver()), + ) + ): distro_infos["libc"] = libc if distro_infos: data["distro"] = distro_infos @@ -327,16 +327,10 @@ def add_trusted_host(self, host, source=None, suppress_logging=False): if host_port not in self.pip_trusted_origins: self.pip_trusted_origins.append(host_port) - self.mount( - build_url_from_netloc(host) + '/', - self._trusted_host_adapter - ) + self.mount(f'{build_url_from_netloc(host)}/', self._trusted_host_adapter) if not host_port[1]: # Mount wildcard ports for the same host. - self.mount( - build_url_from_netloc(host) + ':', - self._trusted_host_adapter - ) + self.mount(f'{build_url_from_netloc(host)}:', self._trusted_host_adapter) def iter_secure_origins(self): # type: () -> Iterator[SecureOrigin] @@ -363,7 +357,7 @@ def is_secure_origin(self, location): # configured on this PackageFinder instance. 
for secure_origin in self.iter_secure_origins(): secure_protocol, secure_host, secure_port = secure_origin - if origin_protocol != secure_protocol and secure_protocol != "*": + if origin_protocol != secure_protocol != "*": continue try: diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/network/utils.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/network/utils.py index c9f7286..e74d576 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/network/utils.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/network/utils.py @@ -44,12 +44,14 @@ def raise_for_status(resp): reason = resp.reason if 400 <= resp.status_code < 500: - http_error_msg = '%s Client Error: %s for url: %s' % ( - resp.status_code, reason, resp.url) + http_error_msg = ( + f'{resp.status_code} Client Error: {reason} for url: {resp.url}' + ) elif 500 <= resp.status_code < 600: - http_error_msg = '%s Server Error: %s for url: %s' % ( - resp.status_code, reason, resp.url) + http_error_msg = ( + f'{resp.status_code} Server Error: {reason} for url: {resp.url}' + ) if http_error_msg: raise NetworkConnectionError(http_error_msg, response=resp) @@ -61,7 +63,7 @@ def response_chunks(response, chunk_size=CONTENT_CHUNK_SIZE): """ try: # Special case for urllib3. - for chunk in response.raw.stream( + yield from response.raw.stream( chunk_size, # We use decode_content=False here because we don't # want urllib3 to mess with the raw bytes we get @@ -86,12 +88,11 @@ def response_chunks(response, chunk_size=CONTENT_CHUNK_SIZE): # By setting this not to decode automatically we # hope to eliminate problems with the second case. decode_content=False, - ): - yield chunk + ) except AttributeError: # Standard file-like object. while True: - chunk = response.raw.read(chunk_size) - if not chunk: + if chunk := response.raw.read(chunk_size): + yield chunk + else: break - yield chunk diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/operations/check.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/operations/check.py index b2381d7..3e43bb3 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/operations/check.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/operations/check.py @@ -36,7 +36,7 @@ def create_package_set_from_installed(**kwargs): """Converts a list of distributions into a PackageSet. 
""" # Default to using all packages installed on the system - if kwargs == {}: + if not kwargs: kwargs = {"local_only": False, "skip": ()} package_set = {} diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/operations/freeze.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/operations/freeze.py index a6f967c..7d7c250 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/operations/freeze.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/operations/freeze.py @@ -107,11 +107,22 @@ def freeze( yield line continue - if line.startswith('-e') or line.startswith('--editable'): - if line.startswith('-e'): - line = line[2:].strip() - else: - line = line[len('--editable'):].strip().lstrip('=') + if line.startswith('-e'): + line = ( + line[2:].strip() + if line.startswith('-e') + else line[len('--editable') :].strip().lstrip('=') + ) + line_req = install_req_from_editable( + line, + isolated=isolated, + ) + elif line.startswith('--editable'): + line = ( + line[2:].strip() + if line.startswith('-e') + else line[len('--editable') :].strip().lstrip('=') + ) line_req = install_req_from_editable( line, isolated=isolated, @@ -135,25 +146,22 @@ def freeze( else: line_req_canonical_name = canonicalize_name( line_req.name) - if line_req_canonical_name not in installations: - # either it's not installed, or it is installed - # but has been processed already - if not req_files[line_req.name]: - logger.warning( - "Requirement file [%s] contains %s, but " - "package %r is not installed", - req_file_path, - COMMENT_RE.sub('', line).strip(), - line_req.name - ) - else: - req_files[line_req.name].append(req_file_path) - else: + if line_req_canonical_name in installations: yield str(installations[ line_req_canonical_name]).rstrip() del installations[line_req_canonical_name] req_files[line_req.name].append(req_file_path) + elif not req_files[line_req.name]: + logger.warning( + "Requirement file [%s] contains %s, but " + "package %r is not installed", + req_file_path, + COMMENT_RE.sub('', line).strip(), + line_req.name + ) + else: + req_files[line_req.name].append(req_file_path) # Warn about requirements that were included multiple times (in a # single requirements file or in different requirements files). for name, files in req_files.items(): @@ -201,9 +209,7 @@ def get_requirement_info(dist): except RemoteNotFoundError: req = dist.as_requirement() comments = [ - '# Editable {} install with no remote ({})'.format( - type(vcs_backend).__name__, req, - ) + f'# Editable {type(vcs_backend).__name__} install with no remote ({req})' ] return (location, True, comments) @@ -249,9 +255,7 @@ def from_dist(cls, dist): # editable that provide .dist-info metadata. 
req, editable, comments = get_requirement_info(dist) if req is None and not editable: - # if PEP 610 metadata is present, attempt to use it - direct_url = dist_get_direct_url(dist) - if direct_url: + if direct_url := dist_get_direct_url(dist): req = direct_url_as_pep440_direct_reference( direct_url, dist.project_name ) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/operations/install/wheel.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/operations/install/wheel.py index 069d79d..a1d9758 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/operations/install/wheel.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/operations/install/wheel.py @@ -188,15 +188,12 @@ def message_about_scripts_not_on_PATH(scripts): for parent_dir, dir_scripts in warn_for.items(): sorted_scripts = sorted(dir_scripts) # type: List[str] if len(sorted_scripts) == 1: - start_text = "script {} is".format(sorted_scripts[0]) + start_text = f"script {sorted_scripts[0]} is" else: - start_text = "scripts {} are".format( - ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1] - ) + start_text = f'scripts {", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]} are' msg_lines.append( - "The {} installed in '{}' which is not on PATH." - .format(start_text, parent_dir) + f"The {start_text} installed in '{parent_dir}' which is not on PATH." ) last_line_fmt = ( @@ -256,12 +253,12 @@ def _record_to_fs_path(record_path): def _fs_to_record_path(path, relative_to=None): # type: (str, Optional[str]) -> RecordPath - if relative_to is not None: - # On Windows, do not handle relative paths if they belong to different - # logical disks - if os.path.splitdrive(path)[0].lower() == \ - os.path.splitdrive(relative_to)[0].lower(): - path = os.path.relpath(path, relative_to) + if ( + relative_to is not None + and os.path.splitdrive(path)[0].lower() + == os.path.splitdrive(relative_to)[0].lower() + ): + path = os.path.relpath(path, relative_to) path = path.replace(os.path.sep, '/') return cast('RecordPath', path) @@ -300,8 +297,10 @@ def get_csv_rows_for_installed( path = _fs_to_record_path(f, lib_dir) digest, length = rehash(f) installed_rows.append((path, digest, length)) - for installed_record_path in installed.values(): - installed_rows.append((installed_record_path, '', '')) + installed_rows.extend( + (installed_record_path, '', '') + for installed_record_path in installed.values() + ) return installed_rows @@ -316,48 +315,12 @@ def get_console_script_specs(console): scripts_to_generate = [] - # Special case pip and setuptools to generate versioned wrappers - # - # The issue is that some projects (specifically, pip and setuptools) use - # code in setup.py to create "versioned" entry points - pip2.7 on Python - # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into - # the wheel metadata at build time, and so if the wheel is installed with - # a *different* version of Python the entry points will be wrong. The - # correct fix for this is to enhance the metadata to be able to describe - # such versioned entry points, but that won't happen till Metadata 2.0 is - # available. - # In the meantime, projects using versioned entry points will either have - # incorrect versioned entry points, or they will not be able to distribute - # "universal" wheels (i.e., they will need a wheel per Python version). 
- # - # Because setuptools and pip are bundled with _ensurepip and virtualenv, - # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we - # override the versioned entry points in the wheel and generate the - # correct ones. This code is purely a short-term measure until Metadata 2.0 - # is available. - # - # To add the level of hack in this section of code, in order to support - # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment - # variable which will control which version scripts get installed. - # - # ENSUREPIP_OPTIONS=altinstall - # - Only pipX.Y and easy_install-X.Y will be generated and installed - # ENSUREPIP_OPTIONS=install - # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note - # that this option is technically if ENSUREPIP_OPTIONS is set and is - # not altinstall - # DEFAULT - # - The default behavior is to install pip, pipX, pipX.Y, easy_install - # and easy_install-X.Y. - pip_script = console.pop('pip', None) - if pip_script: + if pip_script := console.pop('pip', None): if "ENSUREPIP_OPTIONS" not in os.environ: - scripts_to_generate.append('pip = ' + pip_script) + scripts_to_generate.append(f'pip = {pip_script}') if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": - scripts_to_generate.append( - 'pip{} = {}'.format(sys.version_info[0], pip_script) - ) + scripts_to_generate.append(f'pip{sys.version_info[0]} = {pip_script}') scripts_to_generate.append( f'pip{get_major_minor_version()} = {pip_script}' @@ -366,17 +329,12 @@ def get_console_script_specs(console): pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] for k in pip_ep: del console[k] - easy_install_script = console.pop('easy_install', None) - if easy_install_script: + if easy_install_script := console.pop('easy_install', None): if "ENSUREPIP_OPTIONS" not in os.environ: - scripts_to_generate.append( - 'easy_install = ' + easy_install_script - ) + scripts_to_generate.append(f'easy_install = {easy_install_script}') scripts_to_generate.append( - 'easy_install-{} = {}'.format( - get_major_minor_version(), easy_install_script - ) + f'easy_install-{get_major_minor_version()} = {easy_install_script}' ) # Delete any other versioned easy_install entry points easy_install_ep = [ @@ -450,10 +408,7 @@ class MissingCallableSuffix(InstallationError): def __init__(self, entry_point): # type: (str) -> None super().__init__( - "Invalid script entry point: {} - A callable " - "suffix is required. Cf https://packaging.python.org/" - "specifications/entry-points/#use-for-scripts for more " - "information.".format(entry_point) + f"Invalid script entry point: {entry_point} - A callable suffix is required. Cf https://packaging.python.org/specifications/entry-points/#use-for-scripts for more information." ) @@ -499,11 +454,7 @@ def _install_wheel( """ info_dir, metadata = parse_wheel(wheel_zip, name) - if wheel_root_is_purelib(metadata): - lib_dir = scheme.purelib - else: - lib_dir = scheme.platlib - + lib_dir = scheme.purelib if wheel_root_is_purelib(metadata) else scheme.platlib # Record details of the files moved # installed = files copied from the wheel to the destination # changed = files changed while installing (scripts #! line typically) @@ -797,7 +748,7 @@ def req_error_context(req_description): try: yield except InstallationError as e: - message = "For req: {}. {}".format(req_description, e.args[0]) + message = f"For req: {req_description}. 
{e.args[0]}" reraise( InstallationError, InstallationError(message), sys.exc_info()[2] ) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/operations/prepare.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/operations/prepare.py index 054b03f..1cddd83 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/operations/prepare.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/operations/prepare.py @@ -179,11 +179,7 @@ def get_file_url( link, download_dir, hashes ) - if already_downloaded_path: - from_path = already_downloaded_path - else: - from_path = link.file_path - + from_path = already_downloaded_path or link.file_path # If --require-hashes is off, `hashes` is either empty, the # link's embedded hash, or MissingHashes; it is required to # match. If --require-hashes is on, we are satisfied by any @@ -360,11 +356,7 @@ def _ensure_link_req_src_dir(self, req, parallel_builds): # package unpacked in `req.source_dir` if os.path.exists(os.path.join(req.source_dir, 'setup.py')): raise PreviousBuildDirError( - "pip can't proceed with requirements '{}' due to a" - "pre-existing build directory ({}). This is likely " - "due to a previous installation that failed . pip is " - "being responsible and not assuming it can delete this. " - "Please delete it and try again.".format(req, req.source_dir) + f"pip can't proceed with requirements '{req}' due to apre-existing build directory ({req.source_dir}). This is likely due to a previous installation that failed . pip is being responsible and not assuming it can delete this. Please delete it and try again." ) def _get_linked_req_hashes(self, req): @@ -483,8 +475,7 @@ def _prepare_linked_requirement(self, req, parallel_builds): ) except NetworkConnectionError as exc: raise InstallationError( - 'Could not install requirement {} because of HTTP ' - 'error {} for URL {}'.format(req, exc, link) + f'Could not install requirement {req} because of HTTP error {exc} for URL {link}' ) else: file_path, content_type = self._downloaded[link.url] @@ -497,10 +488,12 @@ def _prepare_linked_requirement(self, req, parallel_builds): if local_file: req.local_file_path = local_file.path - dist = _get_prepared_distribution( - req, self.req_tracker, self.finder, self.build_isolation, + return _get_prepared_distribution( + req, + self.req_tracker, + self.finder, + self.build_isolation, ) - return dist def save_linked_requirement(self, req): # type: (InstallRequirement) -> None @@ -542,9 +535,7 @@ def prepare_editable_requirement( with indent_log(): if self.require_hashes: raise InstallationError( - 'The editable requirement {} cannot be installed when ' - 'requiring hashes, because there is no single file to ' - 'hash.'.format(req) + f'The editable requirement {req} cannot be installed when requiring hashes, because there is no single file to hash.' 
) req.ensure_has_source_dir(self.src_dir) req.update_editable() @@ -566,10 +557,9 @@ def prepare_installed_requirement( """Prepare an already-installed requirement """ assert req.satisfied_by, "req should have been satisfied but isn't" - assert skip_reason is not None, ( - "did not get skip reason skipped but req.satisfied_by " - "is set to {}".format(req.satisfied_by) - ) + assert ( + skip_reason is not None + ), f"did not get skip reason skipped but req.satisfied_by is set to {req.satisfied_by}" logger.info( 'Requirement %s: %s (%s)', skip_reason, req, req.satisfied_by.version diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/pyproject.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/pyproject.py index 36f8963..14c1524 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/pyproject.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/pyproject.py @@ -73,27 +73,25 @@ def load_pyproject_toml( # opposed to False can occur when the value is provided via an # environment variable or config file option (due to the quirk of # strtobool() returning an integer in pip's configuration code). - if has_pyproject and not has_setup: - if use_pep517 is not None and not use_pep517: - raise InstallationError( - "Disabling PEP 517 processing is invalid: " - "project does not have a setup.py" - ) + if ( + has_pyproject + and not has_setup + and (use_pep517 is None or use_pep517) + or (not has_pyproject or has_setup) + and build_system + and "build-backend" in build_system + and (use_pep517 is None or use_pep517) + ): use_pep517 = True + elif has_pyproject and not has_setup: + raise InstallationError( + "Disabling PEP 517 processing is invalid: " + "project does not have a setup.py" + ) elif build_system and "build-backend" in build_system: - if use_pep517 is not None and not use_pep517: - raise InstallationError( - "Disabling PEP 517 processing is invalid: " - "project specifies a build backend of {} " - "in pyproject.toml".format( - build_system["build-backend"] - ) - ) - use_pep517 = True - - # If we haven't worked out whether to use PEP 517 yet, - # and the user hasn't explicitly stated a preference, - # we do so if the project has a pyproject.toml file. 
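The load_pyproject_toml rewrite above merges three branches into one long boolean, which is hard to audit at a glance. As a readability aid only, a standalone sketch of the same decision with the error branches (raised when PEP 517 has been explicitly disabled but cannot be) left out; the function and parameter names are local to this sketch:

    def decide_use_pep517(has_pyproject, has_setup, build_backend_declared, use_pep517):
        # sketch only; mirrors the non-error branches of load_pyproject_toml
        if has_pyproject and not has_setup:
            return True            # no setup.py, so PEP 517 is the only route
        if build_backend_declared:
            return True            # an explicit build-backend forces PEP 517
        if use_pep517 is None:
            return has_pyproject   # no preference given: follow pyproject.toml presence
        return use_pep517          # keep the user's explicit choice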
+ raise InstallationError( + f'Disabling PEP 517 processing is invalid: project specifies a build backend of {build_system["build-backend"]} in pyproject.toml' + ) elif use_pep517 is None: use_pep517 = has_pyproject diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/constructors.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/constructors.py index 87f7378..be1b370 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/constructors.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/constructors.py @@ -49,8 +49,8 @@ def _strip_extras(path): m = re.match(r'^(.+)(\[[^\]]+\])$', path) extras = None if m: - path_no_extras = m.group(1) - extras = m.group(2) + path_no_extras = m[1] + extras = m[2] else: path_no_extras = path @@ -59,9 +59,7 @@ def _strip_extras(path): def convert_extras(extras): # type: (Optional[str]) -> Set[str] - if not extras: - return set() - return Requirement("placeholder" + extras.lower()).extras + return Requirement(f"placeholder{extras.lower()}").extras if extras else set() def parse_editable(editable_req): @@ -83,10 +81,7 @@ def parse_editable(editable_req): if os.path.isdir(url_no_extras): if not os.path.exists(os.path.join(url_no_extras, 'setup.py')): - msg = ( - 'File "setup.py" not found. Directory cannot be installed ' - 'in editable mode: {}'.format(os.path.abspath(url_no_extras)) - ) + msg = f'File "setup.py" not found. Directory cannot be installed in editable mode: {os.path.abspath(url_no_extras)}' pyproject_path = make_pyproject_path(url_no_extras) if os.path.isfile(pyproject_path): msg += ( @@ -104,7 +99,7 @@ def parse_editable(editable_req): return ( package_name, url_no_extras, - Requirement("placeholder" + extras.lower()).extras, + Requirement(f"placeholder{extras.lower()}").extras, ) else: return package_name, url_no_extras, set() @@ -124,13 +119,12 @@ def parse_editable(editable_req): f'(beginning with {backends}).' ) - package_name = link.egg_fragment - if not package_name: + if package_name := link.egg_fragment: + return package_name, url, set() + else: raise InstallationError( - "Could not detect requirement name for '{}', please specify one " - "with #egg=your_package_name".format(editable_req) + f"Could not detect requirement name for '{editable_req}', please specify one with #egg=your_package_name" ) - return package_name, url, set() def deduce_helpful_msg(req): @@ -148,13 +142,7 @@ def deduce_helpful_msg(req): with open(req, 'r') as fp: # parse first line only next(parse_requirements(fp.read())) - msg += ( - "The argument you provided " - "({}) appears to be a" - " requirements file. If that is the" - " case, use the '-r' flag to install" - " the packages specified within it." - ).format(req) + msg += f"The argument you provided ({req}) appears to be a requirements file. If that is the case, use the '-r' flag to install the packages specified within it." 
except RequirementParseError: logger.debug( "Cannot parse '%s' as requirements file", req, exc_info=True @@ -242,9 +230,7 @@ def _looks_like_path(name): return True if os.path.altsep is not None and os.path.altsep in name: return True - if name.startswith("."): - return True - return False + return bool(name.startswith(".")) def _get_url_from_path(path, name): @@ -283,17 +269,11 @@ def _get_url_from_path(path, name): def parse_req_from_line(name, line_source): # type: (str, Optional[str]) -> RequirementParts - if is_url(name): - marker_sep = '; ' - else: - marker_sep = ';' + marker_sep = '; ' if is_url(name) else ';' if marker_sep in name: name, markers_as_string = name.split(marker_sep, 1) markers_as_string = markers_as_string.strip() - if not markers_as_string: - markers = None - else: - markers = Marker(markers_as_string) + markers = Marker(markers_as_string) if markers_as_string else None else: markers = None name = name.strip() @@ -333,9 +313,7 @@ def parse_req_from_line(name, line_source): def with_source(text): # type: (str) -> str - if not line_source: - return text - return f'{text} (from {line_source})' + return f'{text} (from {line_source})' if line_source else text if req_as_string is not None: try: @@ -344,8 +322,9 @@ def with_source(text): if os.path.sep in req_as_string: add_msg = "It looks like a path." add_msg += deduce_helpful_msg(req_as_string) - elif ('=' in req_as_string and - not any(op in req_as_string for op in operators)): + elif '=' in req_as_string and all( + op not in req_as_string for op in operators + ): add_msg = "= is not a valid operator. Did you mean == ?" else: add_msg = '' @@ -423,9 +402,7 @@ def install_req_from_req_string( comes_from.link.netloc in domains_not_allowed): # Explicitly disallow pypi packages that depend on external urls raise InstallationError( - "Packages installed from PyPI cannot depend on packages " - "which are not also hosted on PyPI.\n" - "{} depends on {} ".format(comes_from.name, req) + f"Packages installed from PyPI cannot depend on packages which are not also hosted on PyPI.\n{comes_from.name} depends on {req} " ) return InstallRequirement( @@ -443,9 +420,8 @@ def install_req_from_parsed_requirement( use_pep517=None, # type: Optional[bool] user_supplied=False, # type: bool ): - # type: (...) -> InstallRequirement - if parsed_req.is_editable: - req = install_req_from_editable( + return ( + install_req_from_editable( parsed_req.requirement, comes_from=parsed_req.comes_from, use_pep517=use_pep517, @@ -453,9 +429,8 @@ def install_req_from_parsed_requirement( isolated=isolated, user_supplied=user_supplied, ) - - else: - req = install_req_from_line( + if parsed_req.is_editable + else install_req_from_line( parsed_req.requirement, comes_from=parsed_req.comes_from, use_pep517=use_pep517, @@ -465,4 +440,4 @@ def install_req_from_parsed_requirement( line_source=parsed_req.line_source, user_supplied=user_supplied, ) - return req + ) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_file.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_file.py index cf61f82..9e35b2e 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_file.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_file.py @@ -175,9 +175,7 @@ def handle_requirement_line( # type: (...) 
-> ParsedRequirement # preserve for the nested code path - line_comes_from = '{} {} (line {})'.format( - '-c' if line.constraint else '-r', line.filename, line.lineno, - ) + line_comes_from = f"{'-c' if line.constraint else '-r'} {line.filename} (line {line.lineno})" assert line.is_requirement @@ -190,26 +188,25 @@ def handle_requirement_line( comes_from=line_comes_from, constraint=line.constraint, ) - else: - if options: - # Disable wheels if the user has specified build options - cmdoptions.check_install_build_global(options, line.opts) + if options: + # Disable wheels if the user has specified build options + cmdoptions.check_install_build_global(options, line.opts) # get the options that apply to requirements - req_options = {} - for dest in SUPPORTED_OPTIONS_REQ_DEST: - if dest in line.opts.__dict__ and line.opts.__dict__[dest]: - req_options[dest] = line.opts.__dict__[dest] - - line_source = f'line {line.lineno} of {line.filename}' - return ParsedRequirement( - requirement=line.requirement, - is_editable=line.is_editable, - comes_from=line_comes_from, - constraint=line.constraint, - options=req_options, - line_source=line_source, - ) + req_options = { + dest: line.opts.__dict__[dest] + for dest in SUPPORTED_OPTIONS_REQ_DEST + if dest in line.opts.__dict__ and line.opts.__dict__[dest] + } + line_source = f'line {line.lineno} of {line.filename}' + return ParsedRequirement( + requirement=line.requirement, + is_editable=line.is_editable, + comes_from=line_comes_from, + constraint=line.constraint, + options=req_options, + line_source=line_source, + ) def handle_option_line( @@ -306,18 +303,16 @@ def handle_line( """ if line.is_requirement: - parsed_req = handle_requirement_line(line, options) - return parsed_req - else: - handle_option_line( - line.opts, - line.filename, - line.lineno, - finder, - options, - session, - ) - return None + return handle_requirement_line(line, options) + handle_option_line( + line.opts, + line.filename, + line.lineno, + finder, + options, + session, + ) + return None class RequirementsFileParser: @@ -422,9 +417,8 @@ def break_args_options(line): for token in tokens: if token.startswith('-') or token.startswith('--'): break - else: - args.append(token) - options.pop(0) + args.append(token) + options.pop(0) return ' '.join(args), ' '.join(options) @@ -469,7 +463,7 @@ def join_lines(lines_enum): if not line.endswith('\\') or COMMENT_RE.match(line): if COMMENT_RE.match(line): # this ensures comments are always matched later - line = ' ' + line + line = f' {line}' if new_line: new_line.append(line) assert primary_line_number is not None @@ -497,8 +491,7 @@ def ignore_comments(lines_enum): """ for line_number, line in lines_enum: line = COMMENT_RE.sub('', line) - line = line.strip() - if line: + if line := line.strip(): yield line_number, line @@ -521,11 +514,8 @@ def expand_env_variables(lines_enum): """ for line_number, line in lines_enum: for env_var, var_name in ENV_VAR_RE.findall(line): - value = os.getenv(var_name) - if not value: - continue - - line = line.replace(env_var, value) + if value := os.getenv(var_name): + line = line.replace(env_var, value) yield line_number, line diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_install.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_install.py index de30193..00d57c2 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_install.py +++ 
b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_install.py @@ -170,9 +170,9 @@ def __init__( # Set to True after successful installation self.install_succeeded = None # type: Optional[bool] # Supplied options - self.install_options = install_options if install_options else [] - self.global_options = global_options if global_options else [] - self.hash_options = hash_options if hash_options else {} + self.install_options = install_options or [] + self.global_options = global_options or [] + self.hash_options = hash_options or {} # Set to True after successful preparation of this requirement self.prepared = False # User supplied requirement are explicitly requested for installation @@ -213,13 +213,13 @@ def __str__(self): if self.req: s = str(self.req) if self.link: - s += ' from {}'.format(redact_auth_from_url(self.link.url)) + s += f' from {redact_auth_from_url(self.link.url)}' elif self.link: s = redact_auth_from_url(self.link.url) else: s = '' if self.satisfied_by is not None: - s += ' in {}'.format(display_path(self.satisfied_by.location)) + s += f' in {display_path(self.satisfied_by.location)}' if self.comes_from: if isinstance(self.comes_from, str): comes_from = self.comes_from # type: Optional[str] @@ -337,7 +337,7 @@ def from_path(self): else: comes_from = self.comes_from.from_path() if comes_from: - s += '->' + comes_from + s += f'->{comes_from}' return s def ensure_build_location(self, build_dir, autodelete, parallel_builds): @@ -435,34 +435,36 @@ def check_if_exists(self, use_user_site): existing_version = existing_dist.parsed_version if not self.req.specifier.contains(existing_version, prereleases=True): self.satisfied_by = None - if use_user_site: - if dist_in_usersite(existing_dist): - self.should_reinstall = True - elif (running_under_virtualenv() and - dist_in_site_packages(existing_dist)): - raise InstallationError( - "Will not install to the user site because it will " - "lack sys.path precedence to {} in {}".format( - existing_dist.project_name, existing_dist.location) - ) - else: + if ( + use_user_site + and dist_in_usersite(existing_dist) + or not use_user_site + ): self.should_reinstall = True + elif ( + use_user_site + and not dist_in_usersite(existing_dist) + and ( + running_under_virtualenv() + and dist_in_site_packages(existing_dist) + ) + ): + raise InstallationError( + f"Will not install to the user site because it will lack sys.path precedence to {existing_dist.project_name} in {existing_dist.location}" + ) + elif self.editable: + self.should_reinstall = True + # when installing editables, nothing pre-existing should ever + # satisfy + self.satisfied_by = None else: - if self.editable: - self.should_reinstall = True - # when installing editables, nothing pre-existing should ever - # satisfy - self.satisfied_by = None - else: - self.satisfied_by = existing_dist + self.satisfied_by = existing_dist # Things valid for wheels @property def is_wheel(self): # type: () -> bool - if not self.link: - return False - return self.link.is_wheel + return self.link.is_wheel if self.link else False # Things valid for sdists @property @@ -476,9 +478,7 @@ def unpacked_source_directory(self): def setup_py_path(self): # type: () -> str assert self.source_dir, f"No source dir for {self}" - setup_py = os.path.join(self.unpacked_source_directory, 'setup.py') - - return setup_py + return os.path.join(self.unpacked_source_directory, 'setup.py') @property def pyproject_toml_path(self): @@ -673,7 +673,7 @@ def _clean_zip_name(name, prefix): path = 
os.path.join(parentdir, path) name = _clean_zip_name(path, rootdir) - return self.name + '/' + name + return f'{self.name}/{name}' def archive(self, build_dir): # type: (Optional[str]) -> None @@ -686,15 +686,14 @@ def archive(self, build_dir): return create_archive = True - archive_name = '{}-{}.zip'.format(self.name, self.metadata["version"]) + archive_name = f'{self.name}-{self.metadata["version"]}.zip' archive_path = os.path.join(build_dir, archive_name) if os.path.exists(archive_path): response = ask_path_exists( - 'The file {} exists. (i)gnore, (w)ipe, ' - '(b)ackup, (a)bort '.format( - display_path(archive_path)), - ('i', 'w', 'b', 'a')) + f'The file {display_path(archive_path)} exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ', + ('i', 'w', 'b', 'a'), + ) if response == 'i': create_archive = False elif response == 'w': @@ -726,7 +725,7 @@ def archive(self, build_dir): dir_arcname = self._get_archive_name( dirname, parentdir=dirpath, rootdir=dir, ) - zipdir = zipfile.ZipInfo(dir_arcname + '/') + zipdir = zipfile.ZipInfo(f'{dir_arcname}/') zipdir.external_attr = 0x1ED << 16 # 0o755 zip_output.writestr(zipdir, '') for filename in filenames: @@ -836,11 +835,7 @@ def install( if success and self.legacy_install_reason == 8368: deprecated( - reason=( - "{} was installed using the legacy 'setup.py install' " - "method, because a wheel could not be built for it.". - format(self.name) - ), + reason=f"{self.name} was installed using the legacy 'setup.py install' method, because a wheel could not be built for it.", replacement="to fix the wheel build issue reported above", gone_in=None, issue=8368, diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_set.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_set.py index bcd0514..cc57d61 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_set.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_set.py @@ -101,8 +101,7 @@ def add_requirement( tags = compatibility_tags.get_supported() if (self.check_supported_wheels and not wheel.supported(tags)): raise InstallationError( - "{} is not a supported wheel on this platform.".format( - wheel.filename) + f"{wheel.filename} is not a supported wheel on this platform." ) # This next bit is really a sanity check. 
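The add_requirement hunk that follows binds a multi-clause boolean with the walrus operator; the bound name carries the whole expression's truth value. A standalone reminder of that shape, with invented values:

    existing_specifier, incoming_specifier = '>=1.0', '>=2.0'
    if has_conflict := (existing_specifier is not None
                        and existing_specifier != incoming_specifier):
        print('conflicting requirement?', has_conflict)   # True: the value of the whole expression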
@@ -122,14 +121,13 @@ def add_requirement( except KeyError: existing_req = None - has_conflicting_requirement = ( - parent_req_name is None and - existing_req and - not existing_req.constraint and - existing_req.extras == install_req.extras and - existing_req.req.specifier != install_req.req.specifier - ) - if has_conflicting_requirement: + if has_conflicting_requirement := ( + parent_req_name is None + and existing_req + and not existing_req.constraint + and existing_req.extras == install_req.extras + and existing_req.req.specifier != install_req.req.specifier + ): raise InstallationError( "Double requirement given: {} (already in {}, name={!r})" .format(install_req, existing_req, install_req.name) @@ -147,18 +145,15 @@ def add_requirement( if install_req.constraint or not existing_req.constraint: return [], existing_req - does_not_satisfy_constraint = ( - install_req.link and - not ( - existing_req.link and - install_req.link.path == existing_req.link.path + if does_not_satisfy_constraint := ( + install_req.link + and not ( + existing_req.link + and install_req.link.path == existing_req.link.path ) - ) - if does_not_satisfy_constraint: + ): raise InstallationError( - "Could not satisfy constraints for '{}': " - "installation from path or url cannot be " - "constrained to a version".format(install_req.name) + f"Could not satisfy constraints for '{install_req.name}': installation from path or url cannot be constrained to a version" ) # If we're now installing a constraint, mark the existing # object for real installation. diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_tracker.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_tracker.py index b06941e..537570a 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_tracker.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_tracker.py @@ -105,8 +105,7 @@ def add(self, req): except FileNotFoundError: pass else: - message = '{} is already being built: {}'.format( - req.link, contents) + message = f'{req.link} is already being built: {contents}' raise LookupError(message) # If we're here, req should really not be building already. diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_uninstall.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_uninstall.py index 7d632d6..bd1c6d7 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_uninstall.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/req/req_uninstall.py @@ -49,19 +49,16 @@ def _script_names(dist, script_name, is_gui): {console,gui}_scripts for the given ``dist``. 
Returns the list of file names """ - if dist_in_usersite(dist): - bin_dir = bin_user - else: - bin_dir = bin_py + bin_dir = bin_user if dist_in_usersite(dist) else bin_py exe_name = os.path.join(bin_dir, script_name) paths_to_remove = [exe_name] if WINDOWS: - paths_to_remove.append(exe_name + '.exe') - paths_to_remove.append(exe_name + '.exe.manifest') + paths_to_remove.append(f'{exe_name}.exe') + paths_to_remove.append(f'{exe_name}.exe.manifest') if is_gui: - paths_to_remove.append(exe_name + '-script.pyw') + paths_to_remove.append(f'{exe_name}-script.pyw') else: - paths_to_remove.append(exe_name + '-script.py') + paths_to_remove.append(f'{exe_name}-script.py') return paths_to_remove @@ -96,10 +93,8 @@ def uninstallation_paths(dist): if path.endswith('.py'): dn, fn = os.path.split(path) base = fn[:-3] - path = os.path.join(dn, base + '.pyc') - yield path - path = os.path.join(dn, base + '.pyo') - yield path + yield os.path.join(dn, f'{base}.pyc') + yield os.path.join(dn, f'{base}.pyo') def compact(paths): @@ -385,9 +380,7 @@ def remove(self, auto_confirm=False, verbose=False): ) return - dist_name_version = ( - self.dist.project_name + "-" + self.dist.version - ) + dist_name_version = f"{self.dist.project_name}-{self.dist.version}" logger.info('Uninstalling %s:', dist_name_version) with indent_log(): @@ -480,8 +473,9 @@ def from_dist(cls, dist): paths_to_remove = cls(dist) develop_egg_link = egg_link_path(dist) - develop_egg_link_egg_info = '{}.egg-info'.format( - pkg_resources.to_filename(dist.project_name)) + develop_egg_link_egg_info = ( + f'{pkg_resources.to_filename(dist.project_name)}.egg-info' + ) egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info) # Special case for distutils installed package distutils_egg_info = getattr(dist._provider, 'path', None) @@ -500,9 +494,6 @@ def from_dist(cls, dist): os.path.join(dist.egg_info, installed_file) ) paths_to_remove.add(path) - # FIXME: need a test for this elif block - # occurs with --single-version-externally-managed/--record outside - # of pip elif dist.has_metadata('top_level.txt'): if dist.has_metadata('namespace_packages.txt'): namespaces = dist.get_metadata('namespace_packages.txt') @@ -514,9 +505,9 @@ def from_dist(cls, dist): if p and p not in namespaces]: path = os.path.join(dist.location, top_level_pkg) paths_to_remove.add(path) - paths_to_remove.add(path + '.py') - paths_to_remove.add(path + '.pyc') - paths_to_remove.add(path + '.pyo') + paths_to_remove.add(f'{path}.py') + paths_to_remove.add(f'{path}.pyc') + paths_to_remove.add(f'{path}.pyo') elif distutils_egg_info: raise UninstallationError( @@ -535,7 +526,7 @@ def from_dist(cls, dist): easy_install_egg = os.path.split(dist.location)[1] easy_install_pth = os.path.join(os.path.dirname(dist.location), 'easy-install.pth') - paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg) + paths_to_remove.add_pth(easy_install_pth, f'./{easy_install_egg}') elif egg_info_exists and dist.egg_info.endswith('.dist-info'): for path in uninstallation_paths(dist): @@ -545,11 +536,9 @@ def from_dist(cls, dist): # develop egg with open(develop_egg_link, 'r') as fh: link_pointer = os.path.normcase(fh.readline().strip()) - assert (link_pointer == dist.location), ( - 'Egg-link {} does not match installed location of {} ' - '(at {})'.format( - link_pointer, dist.project_name, dist.location) - ) + assert ( + link_pointer == dist.location + ), f'Egg-link {link_pointer} does not match installed location of {dist.project_name} (at {dist.location})' 
paths_to_remove.add(develop_egg_link) easy_install_pth = os.path.join(os.path.dirname(develop_egg_link), 'easy-install.pth') @@ -564,13 +553,10 @@ def from_dist(cls, dist): # find distutils scripts= scripts if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'): for script in dist.metadata_listdir('scripts'): - if dist_in_usersite(dist): - bin_dir = bin_user - else: - bin_dir = bin_py + bin_dir = bin_user if dist_in_usersite(dist) else bin_py paths_to_remove.add(os.path.join(bin_dir, script)) if WINDOWS: - paths_to_remove.add(os.path.join(bin_dir, script) + '.bat') + paths_to_remove.add(f'{os.path.join(bin_dir, script)}.bat') # find console_scripts _scripts_to_remove = [] @@ -625,10 +611,7 @@ def remove(self): # windows uses '\r\n' with py3k, but uses '\n' with py2.x lines = fh.readlines() self._saved_lines = lines - if any(b'\r\n' in line for line in lines): - endline = '\r\n' - else: - endline = '\n' + endline = '\r\n' if any(b'\r\n' in line for line in lines) else '\n' # handle missing trailing newline if lines and not lines[-1].endswith(endline.encode("utf-8")): lines[-1] = lines[-1] + endline.encode("utf-8") diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/base.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/base.py index 71f3226..1a48e72 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/base.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/base.py @@ -23,7 +23,7 @@ def format_name(project, extras): if not extras: return project canonical_extras = sorted(canonicalize_name(e) for e in extras) - return "{}[{}]".format(project, ",".join(canonical_extras)) + return f'{project}[{",".join(canonical_extras)}]' class Constraint: diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/candidates.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/candidates.py index ff2dcf2..b18c7a7 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/candidates.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/candidates.py @@ -42,10 +42,7 @@ def make_install_req_from_link(link, template): # type: (Link, InstallRequirement) -> InstallRequirement assert not template.editable, "template is editable" - if template.req: - line = str(template.req) - else: - line = link.url + line = str(template.req) if template.req else link.url ireq = install_req_from_line( line, user_supplied=template.user_supplied, @@ -191,11 +188,7 @@ def version(self): def format_for_error(self): # type: () -> str - return "{} {} (from {})".format( - self.name, - self.version, - self._link.file_path if self._link.is_file else self._link - ) + return f"{self.name} {self.version} (from {self._link.file_path if self._link.is_file else self._link})" def _prepare_distribution(self): # type: () -> Distribution @@ -454,7 +447,7 @@ def __init__( def __str__(self): # type: () -> str name, rest = str(self.base).split(" ", 1) - return "{}[{}] {}".format(name, ",".join(self.extras), rest) + return f'{name}[{",".join(self.extras)}] {rest}' def __repr__(self): # type: () -> str @@ -492,10 +485,7 @@ def version(self): def format_for_error(self): # type: () -> str - return "{} [{}]".format( - self.base.format_for_error(), - ", 
".join(sorted(self.extras)) - ) + return f'{self.base.format_for_error()} [{", ".join(sorted(self.extras))}]' @property def is_installed(self): @@ -535,10 +525,11 @@ def iter_dependencies(self, with_requires): ) for r in self.base.dist.requires(valid_extras): - requirement = factory.make_requirement_from_spec( - str(r), self.base._ireq, valid_extras, - ) - if requirement: + if requirement := factory.make_requirement_from_spec( + str(r), + self.base._ireq, + valid_extras, + ): yield requirement def get_install_requirement(self): diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/factory.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/factory.py index 62ee045..a0af700 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/factory.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/factory.py @@ -101,13 +101,14 @@ def __init__( self._installed_candidate_cache = { } # type: Dict[str, AlreadyInstalledCandidate] - if not ignore_installed: - self._installed_dists = { + self._installed_dists = ( + {} + if ignore_installed + else { canonicalize_name(dist.project_name): dist for dist in get_installed_distributions(local_only=False) } - else: - self._installed_dists = {} + ) @property def force_reinstall(self): @@ -126,9 +127,7 @@ def _make_candidate_from_dist( except KeyError: base = AlreadyInstalledCandidate(dist, template, factory=self) self._installed_candidate_cache[dist.key] = base - if extras: - return ExtrasCandidate(base, extras) - return base + return ExtrasCandidate(base, extras) if extras else base def _make_candidate_from_link( self, @@ -172,9 +171,7 @@ def _make_candidate_from_link( return None base = self._link_candidate_cache[link] - if extras: - return ExtrasCandidate(base, extras) - return base + return ExtrasCandidate(base, extras) if extras else base def _iter_found_candidates( self, @@ -270,18 +267,20 @@ def find_candidates( # If none of the requirements want an explicit candidate, we can ask # the finder for candidates. - if not explicit_candidates: - return self._iter_found_candidates( + return ( + ( + c + for c in explicit_candidates + if constraint.is_satisfied_by(c) + and all(req.is_satisfied_by(c) for req in requirements) + ) + if explicit_candidates + else self._iter_found_candidates( ireqs, constraint.specifier, constraint.hashes, prefers_installed, ) - - return ( - c for c in explicit_candidates - if constraint.is_satisfied_by(c) - and all(req.is_satisfied_by(c) for req in requirements) ) def make_requirement_from_install_req(self, ireq, requested_extras): @@ -297,9 +296,7 @@ def make_requirement_from_install_req(self, ireq, requested_extras): if ireq.link.is_wheel: wheel = Wheel(ireq.link.filename) if not wheel.supported(self._finder.target_python.get_tags()): - msg = "{} is not a supported wheel on this platform.".format( - wheel.filename, - ) + msg = f"{wheel.filename} is not a supported wheel on this platform." raise UnsupportedWheel(msg) cand = self._make_candidate_from_link( ireq.link, @@ -381,10 +378,7 @@ def get_dist_to_uninstall(self, candidate): # in virtual environments, so we error out. 
if running_under_virtualenv() and dist_in_site_packages(dist): raise InstallationError( - "Will not install to the user site because it will " - "lack sys.path precedence to {} in {}".format( - dist.project_name, dist.location, - ) + f"Will not install to the user site because it will lack sys.path precedence to {dist.project_name} in {dist.location}" ) return None @@ -426,10 +420,7 @@ def get_installation_error(self, e): # satisfied. We just report that case. if len(e.causes) == 1: req, parent = e.causes[0] - if parent is None: - req_disp = str(req) - else: - req_disp = f'{req} (from {parent.name})' + req_disp = str(req) if parent is None else f'{req} (from {parent.name})' logger.critical( "Could not find a version that satisfies the requirement %s", req_disp, @@ -467,13 +458,9 @@ def describe_trigger(parent): trigger = describe_trigger(parent) triggers.add(trigger) - if triggers: - info = text_join(sorted(triggers)) - else: - info = "the requested packages" - + info = text_join(sorted(triggers)) if triggers else "the requested packages" msg = "Cannot install {} because these package versions " \ - "have conflicting dependencies.".format(info) + "have conflicting dependencies.".format(info) logger.critical(msg) msg = "\nThe conflict is caused by:" for req, parent in e.causes: @@ -484,14 +471,14 @@ def describe_trigger(parent): parent.version ) else: - msg = msg + "The user requested " + msg = f"{msg}The user requested " msg = msg + req.format_for_error() msg = msg + "\n\n" + \ - "To fix this you could try to:\n" + \ - "1. loosen the range of package versions you've specified\n" + \ - "2. remove package versions to allow pip attempt to solve " + \ - "the dependency conflict\n" + "To fix this you could try to:\n" + \ + "1. loosen the range of package versions you've specified\n" + \ + "2. remove package versions to allow pip attempt to solve " + \ + "the dependency conflict\n" logger.info(msg) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py index 0a2301d..1f8de43 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py @@ -94,8 +94,6 @@ def __len__(self): @functools.lru_cache(maxsize=1) def __bool__(self): # type: () -> bool - if self._prefers_installed and self._installed: - return True - return any(self) + return True if self._prefers_installed and self._installed else any(self) __nonzero__ = __bool__ # XXX: Python 2. diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/provider.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/provider.py index 0221e19..da0bbe4 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/provider.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/provider.py @@ -109,10 +109,7 @@ def _get_restrictive_rating(requirements): ] if any(op in ("==", "===") for op in operators): return 1 - if operators: - return 2 - # A "bare" requirement without any version requirements. 
- return 3 + return 2 if operators else 3 restrictive = _get_restrictive_rating(req for req, _ in information) transitive = all(parent is not None for _, parent in information) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/requirements.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/requirements.py index 791b571..a267aa4 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/requirements.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/requirements.py @@ -89,7 +89,7 @@ def format_for_error(self): # risking a change in meaning. (Hopefully! Not all edge cases have # been checked) parts = [s.strip() for s in str(self).split(",")] - if len(parts) == 0: + if not parts: return "" elif len(parts) == 1: return parts[0] @@ -102,9 +102,9 @@ def get_candidate_lookup(self): def is_satisfied_by(self, candidate): # type: (Candidate) -> bool - assert candidate.name == self.name, \ - "Internal issue: Candidate is not for this requirement " \ - " {} vs {}".format(candidate.name, self.name) + assert ( + candidate.name == self.name + ), f"Internal issue: Candidate is not for this requirement {candidate.name} vs {self.name}" # We can safely always allow prereleases here since PackageFinder # already implements the prerelease logic, and would have filtered out # prerelease candidates if the user does not expect them. @@ -169,7 +169,7 @@ def __init__(self, name): def __str__(self): # type: () -> str - return "{} (unavailable)".format(self._name) + return f"{self._name} (unavailable)" def __repr__(self): # type: () -> str diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/resolver.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/resolver.py index 63b742b..cb2d5f0 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/resolver.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/resolver.py @@ -83,9 +83,7 @@ def resolve(self, root_reqs, check_supported_wheels): requirements = [] for req in root_reqs: if req.constraint: - # Ensure we only accept valid constraints - problem = check_invalid_constraint_type(req) - if problem: + if problem := check_invalid_constraint_type(req): raise InstallationError(problem) if not req.match_markers(): continue @@ -160,11 +158,10 @@ def resolve(self, root_reqs, check_supported_wheels): ) continue - looks_like_sdist = ( + if looks_like_sdist := ( is_archive_file(candidate.source_link.file_path) and candidate.source_link.ext != ".zip" - ) - if looks_like_sdist: + ): # is a local sdist -- show a deprecation warning! 
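Several of these hunks replace a trailing if/else return with a conditional expression. A small sketch of the resulting shape, reusing the rating logic shown in `_get_restrictive_rating` above; the standalone function name and sample inputs are invented:

def rate_specifier(operators):
    # A pinned version is most restrictive, any other operator is next,
    # and a bare requirement without operators is least restrictive.
    if any(op in ("==", "===") for op in operators):
        return 1
    return 2 if operators else 3

print(rate_specifier(["=="]))  # 1
print(rate_specifier([">="]))  # 2
print(rate_specifier([]))      # 3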
reason = ( "Source distribution is being reinstalled despite an " diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/self_outdated_check.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/self_outdated_check.py index 92f1af7..eaec1ab 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/self_outdated_check.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/self_outdated_check.py @@ -32,8 +32,7 @@ def _get_statefile_name(key): # type: (str) -> str key_bytes = ensure_binary(key) - name = hashlib.sha224(key_bytes).hexdigest() - return name + return hashlib.sha224(key_bytes).hexdigest() class SelfCheckState: @@ -104,9 +103,7 @@ def was_installed_by_pip(pkg): installed by system package manager, such as dnf on Fedora. """ dist = get_distribution(pkg) - if not dist: - return False - return "pip" == get_installer(dist) + return get_installer(dist) == "pip" if dist else False def pip_self_version_check(session, options): diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/compat.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/compat.py index 6be9909..531a5b9 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/compat.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/compat.py @@ -76,22 +76,9 @@ def str_to_display(data, desc=None): ) decoded_data = data.decode(encoding, errors="backslashreplace") - # Make sure we can print the output, by encoding it to the output - # encoding with replacement of unencodable characters, and then - # decoding again. - # We use stderr's encoding because it's less likely to be - # redirected and if we don't find an encoding we skip this - # step (on the assumption that output is wrapped by something - # that won't fail). - # The double getattr is to deal with the possibility that we're - # being called in a situation where sys.__stderr__ doesn't exist, - # or doesn't have an encoding attribute. Neither of these cases - # should occur in normal pip use, but there's no harm in checking - # in case people use pip in (unsupported) unusual situations. 
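The comment block removed above explains why the encoding lookup wraps two `getattr` calls: `sys.__stderr__` may not exist, or the stream may lack an `encoding` attribute. A standalone sketch of that defensive lookup:

import sys

# The nested getattr tolerates both a missing sys.__stderr__ and a stream
# object without an .encoding attribute, falling back to None either way.
output_encoding = getattr(getattr(sys, "__stderr__", None), "encoding", None)
print(output_encoding)  # e.g. 'utf-8', or None when no usable stream exists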
- output_encoding = getattr(getattr(sys, "__stderr__", None), - "encoding", None) - - if output_encoding: + if output_encoding := getattr( + getattr(sys, "__stderr__", None), "encoding", None + ): output_encoded = decoded_data.encode( output_encoding, errors="backslashreplace" @@ -125,17 +112,12 @@ def get_path_uid(path): fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW) file_uid = os.fstat(fd).st_uid os.close(fd) - else: # AIX and Jython - # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW - if not os.path.islink(path): - # older versions of Jython don't have `os.fstat` - file_uid = os.stat(path).st_uid - else: + elif os.path.islink(path): # raise OSError for parity with os.O_NOFOLLOW above - raise OSError( - "{} is a symlink; Will not return uid for symlinks".format( - path) - ) + raise OSError(f"{path} is a symlink; Will not return uid for symlinks") + else: + # older versions of Jython don't have `os.fstat` + file_uid = os.stat(path).st_uid return file_uid diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/compatibility_tags.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/compatibility_tags.py index f12e6c2..a83821d 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/compatibility_tags.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/compatibility_tags.py @@ -30,24 +30,16 @@ def version_info_to_nodot(version_info): def _mac_platforms(arch): - # type: (str) -> List[str] - match = _osx_arch_pat.match(arch) - if match: + if match := _osx_arch_pat.match(arch): name, major, minor, actual_arch = match.groups() mac_version = (int(major), int(minor)) - arches = [ - # Since we have always only checked that the platform starts - # with "macosx", for backwards-compatibility we extract the - # actual prefix provided by the user in case they provided - # something like "macosxcustom_". It may be good to remove - # this as undocumented or deprecate it in the future. - '{}_{}'.format(name, arch[len('macosx_'):]) + return [ + f"{name}_{arch[len('macosx_'):]}" for arch in mac_platforms(mac_version, actual_arch) ] else: # arch pattern didn't match (?!) - arches = [arch] - return arches + return [arch] def _custom_manylinux_platforms(arch): @@ -61,14 +53,14 @@ def _custom_manylinux_platforms(arch): # manylinux2014 wheels: # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels if arch_suffix in {'i686', 'x86_64'}: - arches.append('manylinux2010' + arch_sep + arch_suffix) - arches.append('manylinux1' + arch_sep + arch_suffix) + arches.append(f'manylinux2010{arch_sep}{arch_suffix}') + arches.append(f'manylinux1{arch_sep}{arch_suffix}') elif arch_prefix == 'manylinux2010': # manylinux1 wheels run on most manylinux2010 systems with the # exception of wheels depending on ncurses. 
PEP 571 states # manylinux1 wheels should be considered manylinux2010 wheels: # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels - arches.append('manylinux1' + arch_sep + arch_suffix) + arches.append(f'manylinux1{arch_sep}{arch_suffix}') return arches @@ -76,12 +68,11 @@ def _get_custom_platforms(arch): # type: (str) -> List[str] arch_prefix, arch_sep, arch_suffix = arch.partition('_') if arch.startswith('macosx'): - arches = _mac_platforms(arch) + return _mac_platforms(arch) elif arch_prefix in ['manylinux2014', 'manylinux2010']: - arches = _custom_manylinux_platforms(arch) + return _custom_manylinux_platforms(arch) else: - arches = [arch] - return arches + return [arch] def _expand_allowed_platforms(platforms): @@ -140,10 +131,7 @@ def get_supported( """ supported = [] # type: List[Tag] - python_version = None # type: Optional[PythonVersion] - if version is not None: - python_version = _get_python_version(version) - + python_version = _get_python_version(version) if version is not None else None interpreter = _get_custom_interpreter(impl, version) platforms = _expand_allowed_platforms(platforms) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/deprecation.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/deprecation.py index a419c50..4b0950a 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/deprecation.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/deprecation.py @@ -29,20 +29,20 @@ class PipDeprecationWarning(Warning): # Warnings <-> Logging Integration def _showwarning(message, category, filename, lineno, file=None, line=None): - if file is not None: - if _original_showwarning is not None: - _original_showwarning( - message, category, filename, lineno, file, line, - ) - elif issubclass(category, PipDeprecationWarning): + if ( + file is not None + and _original_showwarning is not None + or file is None + and not issubclass(category, PipDeprecationWarning) + ): + _original_showwarning( + message, category, filename, lineno, file, line, + ) + elif file is None: # We use a specially named logger which will handle all of the # deprecation messages for pip. 
logger = logging.getLogger("pip._internal.deprecations") logger.warning(message) - else: - _original_showwarning( - message, category, filename, lineno, file, line, - ) def install_warning_logger(): diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/direct_url_helpers.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/direct_url_helpers.py index 9aeb74a..0fa2a4c 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/direct_url_helpers.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/direct_url_helpers.py @@ -26,12 +26,10 @@ def direct_url_as_pep440_direct_reference(direct_url, name): # type: (DirectUrl, str) -> str """Convert a DirectUrl to a pip requirement string.""" direct_url.validate() # if invalid, this is a pip bug - requirement = name + " @ " + requirement = f"{name} @ " fragments = [] if isinstance(direct_url.info, VcsInfo): - requirement += "{}+{}@{}".format( - direct_url.info.vcs, direct_url.url, direct_url.info.commit_id - ) + requirement += f"{direct_url.info.vcs}+{direct_url.url}@{direct_url.info.commit_id}" elif isinstance(direct_url.info, ArchiveInfo): requirement += direct_url.url if direct_url.info.hash: @@ -40,7 +38,7 @@ def direct_url_as_pep440_direct_reference(direct_url, name): assert isinstance(direct_url.info, DirInfo) requirement += direct_url.url if direct_url.subdirectory: - fragments.append("subdirectory=" + direct_url.subdirectory) + fragments.append(f"subdirectory={direct_url.subdirectory}") if fragments: requirement += "#" + "&".join(fragments) return requirement @@ -85,10 +83,7 @@ def direct_url_from_link(link, source_dir=None, link_is_in_wheel_cache=False): subdirectory=link.subdirectory_fragment, ) else: - hash = None - hash_name = link.hash_name - if hash_name: - hash = f"{hash_name}={link.hash}" + hash = f"{hash_name}={link.hash}" if (hash_name := link.hash_name) else None return DirectUrl( url=link.url_without_fragment, info=ArchiveInfo(hash=hash), diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/distutils_args.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/distutils_args.py index dd433fa..cc3e4a2 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/distutils_args.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/distutils_args.py @@ -44,5 +44,5 @@ def parse_distutils_args(args): # exhaustive. 
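The combined boolean condition in the `_showwarning` hunk above is denser than the nested branches it replaces, but it covers the same cases. A quick consistency check over all eight input combinations, with invented argument names and return labels standing in for the real calls:

import itertools

def branch_before(file_given, have_original, is_pip_deprecation):
    # Mirrors the nested if/elif/else removed in the hunk above.
    if file_given:
        return "delegate" if have_original else "drop"
    elif is_pip_deprecation:
        return "log"
    else:
        return "delegate"

def branch_after(file_given, have_original, is_pip_deprecation):
    # Mirrors the single combined condition in the refactored version.
    if (file_given and have_original) or (not file_given and not is_pip_deprecation):
        return "delegate"
    elif not file_given:
        return "log"
    return "drop"

for case in itertools.product([True, False], repeat=3):
    assert branch_before(*case) == branch_after(*case), case
print("all 8 branch combinations agree")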
pass else: - result.update(match.__dict__) + result |= match.__dict__ return result diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/encoding.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/encoding.py index 9ecb81c..c6d9731 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/encoding.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/encoding.py @@ -31,7 +31,7 @@ def auto_decode(data): return data[len(bom):].decode(encoding) # Lets check the first two lines as in PEP263 for line in data.split(b'\n')[:2]: - if line[0:1] == b'#' and ENCODING_RE.search(line): + if line[:1] == b'#' and ENCODING_RE.search(line): result = ENCODING_RE.search(line) assert result is not None encoding = result.groups()[0].decode('ascii') diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/filesystem.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/filesystem.py index 2a706ab..6a870b2 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/filesystem.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/filesystem.py @@ -32,17 +32,15 @@ def check_path_owner(path): previous = None while path != previous: if os.path.lexists(path): - # Check if path is writable by current user. - if os.geteuid() == 0: - # Special handling for root user in order to handle properly - # cases where users use sudo without -H flag. - try: - path_uid = get_path_uid(path) - except OSError: - return False - return path_uid == 0 - else: + if os.geteuid() != 0: return os.access(path, os.W_OK) + # Special handling for root user in order to handle properly + # cases where users use sudo without -H flag. + try: + path_uid = get_path_uid(path) + except OSError: + return False + return path_uid == 0 else: previous, path = path, os.path.dirname(path) return False # assume we don't own the path @@ -175,9 +173,7 @@ def find_files(path, pattern): def file_size(path): # type: (str) -> Union[int, float] # If it's a symlink, return 0. - if os.path.islink(path): - return 0 - return os.path.getsize(path) + return 0 if os.path.islink(path) else os.path.getsize(path) def format_file_size(path): diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/filetypes.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/filetypes.py index bf4bd75..a9629d8 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/filetypes.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/filetypes.py @@ -21,6 +21,4 @@ def is_archive_file(name): # type: (str) -> bool """Return True if `name` is a considered as an archive file.""" ext = splitext(name)[1].lower() - if ext in ARCHIVE_EXTENSIONS: - return True - return False + return ext in ARCHIVE_EXTENSIONS diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/glibc.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/glibc.py index f4b68b3..74f2acc 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/glibc.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/glibc.py @@ -90,7 +90,4 @@ def libc_ver(): in case the lookup fails. 
""" glibc_version = glibc_version_string() - if glibc_version is None: - return ("", "") - else: - return ("glibc", glibc_version) + return ("", "") if glibc_version is None else ("glibc", glibc_version) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/hashes.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/hashes.py index 16906ed..fcc528e 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/hashes.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/hashes.py @@ -50,11 +50,11 @@ def __and__(self, other): return other # Otherwise only hashes that present in both objects are allowed. - new = {} - for alg, values in other._allowed.items(): - if alg not in self._allowed: - continue - new[alg] = [v for v in values if v in self._allowed[alg]] + new = { + alg: [v for v in values if v in self._allowed[alg]] + for alg, values in other._allowed.items() + if alg in self._allowed + } return Hashes(new) @property @@ -126,9 +126,11 @@ def __bool__(self): def __eq__(self, other): # type: (object) -> bool - if not isinstance(other, Hashes): - return NotImplemented - return self._allowed == other._allowed + return ( + self._allowed == other._allowed + if isinstance(other, Hashes) + else NotImplemented + ) def __hash__(self): # type: () -> int diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/logging.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/logging.py index ad8f947..70d14d9 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/logging.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/logging.py @@ -104,10 +104,7 @@ def get_message_start(self, formatted, levelno): # Then the message already has a prefix. We don't want it to # look like "WARNING: DEPRECATION: ...." 
return '' - if levelno < logging.ERROR: - return 'WARNING: ' - - return 'ERROR: ' + return 'WARNING: ' if levelno < logging.ERROR else 'ERROR: ' def format(self, record): """ @@ -118,9 +115,7 @@ def format(self, record): message_start = self.get_message_start(formatted, record.levelno) formatted = message_start + formatted - prefix = '' - if self.add_timestamp: - prefix = f"{self.formatTime(record)} " + prefix = f"{self.formatTime(record)} " if self.add_timestamp else '' prefix += " " * get_indentation() formatted = "".join([ prefix + line @@ -170,8 +165,9 @@ def should_color(self): return False real_stream = ( - self.stream if not isinstance(self.stream, colorama.AnsiToWin32) - else self.stream.wrapped + self.stream.wrapped + if isinstance(self.stream, colorama.AnsiToWin32) + else self.stream ) # If the stream is a tty we should color it @@ -179,11 +175,7 @@ def should_color(self): return True # If we have an ANSI term we should color it - if os.environ.get("TERM") == "ANSI": - return True - - # If anything else we should not color it - return False + return os.environ.get("TERM") == "ANSI" def format(self, record): msg = logging.StreamHandler.format(self, record) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/misc.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/misc.py index 856bc6c..7e0ea0d 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/misc.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/misc.py @@ -71,11 +71,7 @@ def get_pip_version(): pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..") pip_pkg_dir = os.path.abspath(pip_pkg_dir) - return ( - 'pip {} from {} (python {})'.format( - __version__, pip_pkg_dir, get_major_minor_version(), - ) - ) + return f'pip {__version__} from {pip_pkg_dir} (python {get_major_minor_version()})' def normalize_version_info(py_version_info): @@ -105,20 +101,15 @@ def ensure_dir(path): os.makedirs(path) except OSError as e: # Windows can raise spurious ENOTEMPTY errors. See #6426. - if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY: + if e.errno not in [errno.EEXIST, errno.ENOTEMPTY]: raise def get_prog(): # type: () -> str - try: + with contextlib.suppress(AttributeError, TypeError, IndexError): prog = os.path.basename(sys.argv[0]) - if prog in ('__main__.py', '-c'): - return f"{sys.executable} -m pip" - else: - return prog - except (AttributeError, TypeError, IndexError): - pass + return f"{sys.executable} -m pip" if prog in ('__main__.py', '-c') else prog return 'pip' @@ -180,7 +171,7 @@ def display_path(path): if possible.""" path = os.path.normcase(os.path.abspath(path)) if path.startswith(os.getcwd() + os.path.sep): - path = '.' 
+ path[len(os.getcwd()):] + path = f'.{path[len(os.getcwd()):]}' return path @@ -209,8 +200,7 @@ def _check_no_input(message): """Raise an error if no input is allowed.""" if os.environ.get('PIP_NO_INPUT'): raise Exception( - 'No input was expected ($PIP_NO_INPUT set); question: {}'.format( - message) + f'No input was expected ($PIP_NO_INPUT set); question: {message}' ) @@ -266,11 +256,11 @@ def format_size(bytes): if bytes > 1000 * 1000: return '{:.1f} MB'.format(bytes / 1000.0 / 1000) elif bytes > 10 * 1000: - return '{} kB'.format(int(bytes / 1000)) + return f'{int(bytes / 1000)} kB' elif bytes > 1000: return '{:.1f} kB'.format(bytes / 1000.0) else: - return '{} bytes'.format(int(bytes)) + return f'{int(bytes)} bytes' def tabulate(rows): @@ -298,18 +288,16 @@ def is_installable_dir(path): if os.path.isfile(setup_py): return True pyproject_toml = os.path.join(path, 'pyproject.toml') - if os.path.isfile(pyproject_toml): - return True - return False + return bool(os.path.isfile(pyproject_toml)) def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE): """Yield pieces of data from a file-like object until EOF.""" while True: - chunk = file.read(size) - if not chunk: + if chunk := file.read(size): + yield chunk + else: break - yield chunk def normalize_path(path, resolve_symlinks=True): @@ -319,10 +307,7 @@ def normalize_path(path, resolve_symlinks=True): """ path = os.path.expanduser(path) - if resolve_symlinks: - path = os.path.realpath(path) - else: - path = os.path.abspath(path) + path = os.path.realpath(path) if resolve_symlinks else os.path.abspath(path) return os.path.normcase(path) @@ -348,10 +333,8 @@ def renames(old, new): head, tail = os.path.split(old) if head and tail: - try: + with contextlib.suppress(OSError): os.removedirs(head) - except OSError: - pass def is_local(path): @@ -364,9 +347,11 @@ def is_local(path): Caution: this function assumes the head of path has been normalized with normalize_path. """ - if not running_under_virtualenv(): - return True - return path.startswith(normalize_path(sys.prefix)) + return ( + path.startswith(normalize_path(sys.prefix)) + if running_under_virtualenv() + else True + ) def dist_is_local(dist): @@ -404,7 +389,7 @@ def dist_is_editable(dist): Return True if given Distribution is an editable install. """ for path_item in sys.path: - egg_link = os.path.join(path_item, dist.project_name + '.egg-link') + egg_link = os.path.join(path_item, f'{dist.project_name}.egg-link') if os.path.isfile(egg_link): return True return False @@ -559,7 +544,7 @@ def egg_link_path(dist): sites.append(site_packages) for site in sites: - egglink = os.path.join(site, dist.project_name) + '.egg-link' + egglink = f'{os.path.join(site, dist.project_name)}.egg-link' if os.path.isfile(egglink): return egglink return None @@ -575,8 +560,7 @@ def dist_location(dist): The returned location is normalized (in particular, with symlinks removed). """ - egg_link = egg_link_path(dist) - if egg_link: + if egg_link := egg_link_path(dist): return normalize_path(egg_link) return normalize_path(dist.location) @@ -706,14 +690,7 @@ def split_auth_from_netloc(netloc): # behaves if more than one @ is present (which can be checked using # the password attribute of urlsplit()'s return value). 
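`get_prog` and `renames` above replace try/except/pass with `contextlib.suppress`, which reads as "run this, and ignore these exception types if they occur". A minimal sketch; the path literal is invented:

import contextlib
import os

# Equivalent to try/except OSError: pass -- the cleanup is best-effort and
# a failure (missing or non-empty directory) is deliberately ignored.
with contextlib.suppress(OSError):
    os.removedirs("does-not-exist")
print("still running")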
auth, netloc = netloc.rsplit('@', 1) - if ':' in auth: - # Split from the left because that's how urllib.parse.urlsplit() - # behaves if more than one : is present (which again can be checked - # using the password attribute of the return value) - user_pass = auth.split(':', 1) - else: - user_pass = auth, None - + user_pass = auth.split(':', 1) if ':' in auth else (auth, None) user_pass = tuple( None if x is None else urllib.parse.unquote(x) for x in user_pass ) @@ -818,12 +795,7 @@ def __str__(self): # This is useful for testing. def __eq__(self, other): # type: (Any) -> bool - if type(self) != type(other): - return False - - # The string being used for redaction doesn't also have to match, - # just the raw, original string. - return (self.secret == other.secret) + return False if type(self) != type(other) else (self.secret == other.secret) def hide_value(value): @@ -846,24 +818,20 @@ def protect_pip_from_modification_on_windows(modifying_pip): """ pip_names = [ "pip.exe", - "pip{}.exe".format(sys.version_info[0]), - "pip{}.{}.exe".format(*sys.version_info[:2]) + f"pip{sys.version_info[0]}.exe", + "pip{}.{}.exe".format(*sys.version_info[:2]), ] - # See https://github.com/pypa/pip/issues/1299 for more discussion - should_show_use_python_msg = ( - modifying_pip and - WINDOWS and - os.path.basename(sys.argv[0]) in pip_names - ) - - if should_show_use_python_msg: + if should_show_use_python_msg := ( + modifying_pip + and WINDOWS + and os.path.basename(sys.argv[0]) in pip_names + ): new_command = [ sys.executable, "-m", "pip" ] + sys.argv[1:] raise CommandError( - 'To modify pip, please run the following command:\n{}' - .format(" ".join(new_command)) + f'To modify pip, please run the following command:\n{" ".join(new_command)}' ) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/models.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/models.py index 6d6fc63..a79dd2f 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/models.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/models.py @@ -35,7 +35,8 @@ def __eq__(self, other): return self._compare(other, operator.__eq__) def _compare(self, other, method): - if not isinstance(other, self._defining_class): - return NotImplemented - - return method(self._compare_key, other._compare_key) + return ( + method(self._compare_key, other._compare_key) + if isinstance(other, self._defining_class) + else NotImplemented + ) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/setuptools_build.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/setuptools_build.py index ae4196e..5f64a4f 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/setuptools_build.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/setuptools_build.py @@ -168,11 +168,7 @@ def make_setuptools_install_args( if use_user_site: args += ["--user", "--prefix="] - if pycompile: - args += ["--compile"] - else: - args += ["--no-compile"] - + args += ["--compile"] if pycompile else ["--no-compile"] if header_dir: args += ["--install-headers", header_dir] diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/subprocess.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/subprocess.py index e6968cc..c87f6f8 100644 --- 
a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/subprocess.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/subprocess.py @@ -86,7 +86,7 @@ def make_subprocess_output_error( # We know the joined output value ends in a newline. output = ''.join(lines) - msg = ( + return ( # Use a unicode string to avoid "UnicodeEncodeError: 'ascii' # codec can't encode character ..." in Python 2 when a format # argument (e.g. `output`) has a non-ascii character. @@ -102,7 +102,6 @@ def make_subprocess_output_error( output=output, divider=LOG_DIVIDER, ) - return msg def call_subprocess( @@ -179,11 +178,10 @@ def call_subprocess( env.pop(name, None) try: proc = subprocess.Popen( - # Convert HiddenText objects to the underlying str. reveal_command_args(cmd), stdin=subprocess.PIPE, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT if not stdout_only else subprocess.PIPE, + stderr=subprocess.PIPE if stdout_only else subprocess.STDOUT, cwd=cwd, env=env, ) @@ -263,9 +261,7 @@ def call_subprocess( proc.returncode, cwd, ) - elif on_returncode == 'ignore': - pass - else: + elif on_returncode != 'ignore': raise ValueError('Invalid value: on_returncode={!r}'.format( on_returncode)) return output diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/temp_dir.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/temp_dir.py index 920ebed..e4bf1ae 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/temp_dir.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/temp_dir.py @@ -124,15 +124,7 @@ def __init__( super().__init__() if delete is _default: - if path is not None: - # If we were given an explicit directory, resolve delete option - # now. - delete = False - else: - # Otherwise, we wait until cleanup and see what - # tempdir_registry says. - delete = None - + delete = False if path is not None else None # The only time we specify path is in for editables where it # is the value of the --src option. if path is None: diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/urls.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/urls.py index 0a27428..b285620 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/urls.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/urls.py @@ -11,9 +11,7 @@ def get_url_scheme(url): # type: (str) -> Optional[str] - if ':' not in url: - return None - return url.split(':', 1)[0].lower() + return None if ':' not in url else url.split(':', 1)[0].lower() def path_to_url(path): @@ -23,8 +21,7 @@ def path_to_url(path): quoted path parts. 
""" path = os.path.normpath(os.path.abspath(path)) - url = urllib.parse.urljoin('file:', urllib.request.pathname2url(path)) - return url + return urllib.parse.urljoin('file:', urllib.request.pathname2url(path)) def url_to_path(url): diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/wheel.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/wheel.py index abef239..3bd5033 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/wheel.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/utils/wheel.py @@ -41,11 +41,7 @@ def get_metadata(self, name): return super().get_metadata(name) except UnicodeDecodeError as e: # Augment the default error with the origin of the file. - raise UnsupportedWheel( - "Error decoding metadata for {}: {}".format( - self._wheel_name, e - ) - ) + raise UnsupportedWheel(f"Error decoding metadata for {self._wheel_name}: {e}") def pkg_resources_distribution_for_wheel(wheel_zip, name, location): @@ -74,9 +70,7 @@ def pkg_resources_distribution_for_wheel(wheel_zip, name, location): wheel_zip, full_path ) except UnsupportedWheel as e: - raise UnsupportedWheel( - "{} has an invalid wheel, {}".format(name, str(e)) - ) + raise UnsupportedWheel(f"{name} has an invalid wheel, {str(e)}") metadata = WheelMetadata(metadata_text, location) @@ -97,9 +91,7 @@ def parse_wheel(wheel_zip, name): metadata = wheel_metadata(wheel_zip, info_dir) version = wheel_version(metadata) except UnsupportedWheel as e: - raise UnsupportedWheel( - "{} has an invalid wheel, {}".format(name, str(e)) - ) + raise UnsupportedWheel(f"{name} has an invalid wheel, {str(e)}") check_compatibility(version, name) @@ -123,9 +115,7 @@ def wheel_dist_info_dir(source, name): if len(info_dirs) > 1: raise UnsupportedWheel( - "multiple .dist-info directories found: {}".format( - ", ".join(info_dirs) - ) + f'multiple .dist-info directories found: {", ".join(info_dirs)}' ) info_dir = info_dirs[0] @@ -208,8 +198,7 @@ def check_compatibility(version, name): """ if version[0] > VERSION_COMPATIBLE[0]: raise UnsupportedWheel( - "{}'s Wheel-Version ({}) is not compatible with this version " - "of pip".format(name, '.'.join(map(str, version))) + f"{name}'s Wheel-Version ({'.'.join(map(str, version))}) is not compatible with this version of pip" ) elif version > VERSION_COMPATIBLE: logger.warning( diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/bazaar.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/bazaar.py index 102102a..ede2a5b 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/bazaar.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/bazaar.py @@ -77,7 +77,7 @@ def get_url_rev_and_auth(cls, url): # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it url, rev, user_pass = super().get_url_rev_and_auth(url) if url.startswith('ssh://'): - url = 'bzr+' + url + url = f'bzr+{url}' return url, rev, user_pass @classmethod @@ -92,9 +92,7 @@ def get_remote_url(cls, location): 'parent branch: '): if line.startswith(x): repo = line.split(x)[1] - if cls._is_local_repository(repo): - return path_to_url(repo) - return repo + return path_to_url(repo) if cls._is_local_repository(repo) else repo raise RemoteNotFoundError @classmethod diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/git.py 
b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/git.py index ac4dedd..0587d64 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/git.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/git.py @@ -110,16 +110,13 @@ def get_current_branch(cls, location): ) ref = output.strip() - if ref.startswith('refs/heads/'): - return ref[len('refs/heads/'):] - - return None + return ref[len('refs/heads/'):] if ref.startswith('refs/heads/') else None def export(self, location, url): # type: (str, HiddenText) -> None """Export the Git repository at the url to the destination location""" if not location.endswith('/'): - location = location + '/' + location = f'{location}/' with TempDirectory(kind="export") as temp_dir: self.unpack(temp_dir.path, url=url) @@ -181,15 +178,7 @@ def _should_fetch(cls, dest, rev): # Always fetch remote refs. return True - if not looks_like_hash(rev): - # Git fetch would fail with abbreviated commits. - return False - - if cls.has_commit(dest, rev): - # Don't fetch if we have the commit locally. - return False - - return True + return not cls.has_commit(dest, rev) if looks_like_hash(rev) else False @classmethod def resolve_revision(cls, dest, url, rev_options): @@ -245,11 +234,7 @@ def is_commit_id_equal(cls, dest, name): dest: the repository directory. name: a string name. """ - if not name: - # Then avoid an unnecessary subprocess call. - return False - - return cls.get_revision(dest) == name + return cls.get_revision(dest) == name if name else False def fetch_new(self, dest, url, rev_options): # type: (str, HiddenText, RevOptions) -> None @@ -345,7 +330,7 @@ def has_commit(cls, location, rev): """ try: cls.run_command( - ['rev-parse', '-q', '--verify', "sha^" + rev], + ['rev-parse', '-q', '--verify', f"sha^{rev}"], cwd=location, log_failed_cmd=False, ) @@ -430,8 +415,7 @@ def update_submodules(cls, location): @classmethod def get_repository_root(cls, location): - loc = super().get_repository_root(location) - if loc: + if loc := super().get_repository_root(location): return loc try: r = cls.run_command( diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/mercurial.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/mercurial.py index b27d6d7..f207aae 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/mercurial.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/mercurial.py @@ -104,13 +104,12 @@ def get_revision(cls, location): """ Return the repository-local changeset revision number, as an integer. 
""" - current_revision = cls.run_command( + return cls.run_command( ['parents', '--template={rev}'], show_stdout=False, stdout_only=True, cwd=location, ).strip() - return current_revision @classmethod def get_requirement_revision(cls, location): @@ -118,13 +117,12 @@ def get_requirement_revision(cls, location): Return the changeset identification hash, as a 40-character hexadecimal string """ - current_rev_hash = cls.run_command( + return cls.run_command( ['parents', '--template={node}'], show_stdout=False, stdout_only=True, cwd=location, ).strip() - return current_rev_hash @classmethod def is_commit_id_equal(cls, dest, name): @@ -147,8 +145,7 @@ def get_subdirectory(cls, location): @classmethod def get_repository_root(cls, location): - loc = super().get_repository_root(location) - if loc: + if loc := super().get_repository_root(location): return loc try: r = cls.run_command( diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/subversion.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/subversion.py index 2953b06..953c917 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/subversion.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/subversion.py @@ -71,7 +71,7 @@ def get_revision(cls, location): dirurl, localrev = cls._get_svn_url_rev(base) if base == location: - base = dirurl + '/' # save the root url + base = f'{dirurl}/' elif not dirurl or not dirurl.startswith(base): dirs[:] = [] continue # not part of the same svn tree, skip it @@ -97,7 +97,7 @@ def get_url_rev_and_auth(cls, url): # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it url, rev, user_pass = super().get_url_rev_and_auth(url) if url.startswith('ssh://'): - url = 'svn+' + url + url = f'svn+{url}' return url, rev, user_pass @staticmethod @@ -182,11 +182,7 @@ def _get_svn_url_rev(cls, location): except InstallationError: url, revs = None, [] - if revs: - rev = max(revs) - else: - rev = 0 - + rev = max(revs) if revs else 0 return url, rev @classmethod @@ -289,10 +285,7 @@ def get_remote_call_options(self): # e.g. RHEL/CentOS 7, which is supported until 2024, ships with # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip # can't safely add the option if the SVN version is < 1.8 (or unknown). 
- if svn_version >= (1, 8): - return ['--force-interactive'] - - return [] + return ['--force-interactive'] if svn_version >= (1, 8) else [] def export(self, location, url): # type: (str, HiddenText) -> None diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/versioncontrol.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/versioncontrol.py index d7644a0..7576c30 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/versioncontrol.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/vcs/versioncontrol.py @@ -145,10 +145,7 @@ def __repr__(self): @property def arg_rev(self): # type: () -> Optional[str] - if self.rev is None: - return self.vc_class.default_arg_rev - - return self.rev + return self.vc_class.default_arg_rev if self.rev is None else self.rev def to_args(self): # type: () -> CommandArgs @@ -165,10 +162,7 @@ def to_args(self): def to_display(self): # type: () -> str - if not self.rev: - return '' - - return f' (to revision {self.rev})' + return f' (to revision {self.rev})' if self.rev else '' def make_new(self, rev): # type: (str) -> RevOptions @@ -258,10 +252,14 @@ def get_backend_for_scheme(self, scheme): """ Return a VersionControl object or None. """ - for vcs_backend in self._registry.values(): - if scheme in vcs_backend.schemes: - return vcs_backend - return None + return next( + ( + vcs_backend + for vcs_backend in self._registry.values() + if scheme in vcs_backend.schemes + ), + None, + ) def get_backend(self, name): # type: (str) -> Optional[VersionControl] @@ -332,10 +330,9 @@ def get_src_requirement(cls, repo_dir, project_name): revision = cls.get_requirement_revision(repo_dir) subdir = cls.get_subdirectory(repo_dir) - req = make_vcs_requirement_url(repo_url, revision, project_name, - subdir=subdir) - - return req + return make_vcs_requirement_url( + repo_url, revision, project_name, subdir=subdir + ) @staticmethod def get_base_rev_args(rev): @@ -461,9 +458,7 @@ def get_url_rev_options(self, url): """ secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret) username, secret_password = user_pass - password = None # type: Optional[HiddenText] - if secret_password is not None: - password = hide_value(secret_password) + password = hide_value(secret_password) if secret_password is not None else None extra_args = self.make_rev_args(username, password) rev_options = self.make_rev_options(rev, extra_args=extra_args) @@ -592,8 +587,7 @@ def obtain(self, dest, url): self.name, url, ) - response = ask_path_exists('What to do? {}'.format( - prompt[0]), prompt[1]) + response = ask_path_exists(f'What to do? {prompt[0]}', prompt[1]) if response == 'a': sys.exit(-1) @@ -717,6 +711,4 @@ def get_repository_root(cls, location): This can do more than is_repository_directory() alone. For example, the Git override checks that Git is actually available. 
""" - if cls.is_repository_directory(location): - return location - return None + return location if cls.is_repository_directory(location) else None diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/wheel_builder.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/wheel_builder.py index 8ec9086..cdf25fb 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/wheel_builder.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_internal/wheel_builder.py @@ -136,17 +136,12 @@ def _should_cache( assert req.source_dir vcs_backend = vcs.get_backend_for_scheme(req.link.scheme) assert vcs_backend - if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir): - return True - return False - + return bool( + vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir) + ) assert req.link base, ext = req.link.splitext() - if _contains_egg_info(base): - return True - - # Otherwise, do not cache. - return False + return bool(_contains_egg_info(base)) def _get_cache_dir( @@ -179,7 +174,7 @@ def _get_metadata_version(dist): try: return Version(value) except InvalidVersion: - msg = "Invalid Metadata-Version: {}".format(value) + msg = f"Invalid Metadata-Version: {value}" raise UnsupportedWheel(msg) raise UnsupportedWheel("Missing Metadata-Version") @@ -338,10 +333,9 @@ def build( build_successes, build_failures = [], [] for req in requirements: cache_dir = _get_cache_dir(req, wheel_cache) - wheel_file = _build_one( + if wheel_file := _build_one( req, cache_dir, verify, build_options, global_options - ) - if wheel_file: + ): # Update the link for this. req.link = Link(path_to_url(wheel_file)) req.local_file_path = req.link.file_path diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/appdirs.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/appdirs.py index 4fac8e8..3572b3d 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/appdirs.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/appdirs.py @@ -78,20 +78,21 @@ def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): For Unix, we follow the XDG spec and support $XDG_DATA_HOME. That means, by default "~/.local/share/". """ - if system == "win32": + if system == 'darwin': + path = os.path.expanduser('~/Library/Application Support/') + if appname: + path = os.path.join(path, appname) + elif system == "win32": if appauthor is None: appauthor = appname - const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" + const = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA" path = os.path.normpath(_get_win_folder(const)) if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) - elif system == 'darwin': - path = os.path.expanduser('~/Library/Application Support/') - if appname: - path = os.path.join(path, appname) + path = ( + os.path.join(path, appauthor, appname) + if appauthor is not False + else os.path.join(path, appname) + ) else: path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) if appname: @@ -132,19 +133,20 @@ def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): WARNING: Do not use this on Windows. See the Vista-Fail note above for why. 
""" - if system == "win32": + if system == 'darwin': + path = os.path.expanduser('/Library/Application Support') + if appname: + path = os.path.join(path, appname) + elif system == "win32": if appauthor is None: appauthor = appname path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) - elif system == 'darwin': - path = os.path.expanduser('/Library/Application Support') - if appname: - path = os.path.join(path, appname) + path = ( + os.path.join(path, appauthor, appname) + if appauthor is not False + else os.path.join(path, appname) + ) else: # XDG default for $XDG_DATA_DIRS # only first, if multipath is False @@ -156,10 +158,7 @@ def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): appname = os.path.join(appname, version) pathlist = [os.path.join(x, appname) for x in pathlist] - if multipath: - path = os.pathsep.join(pathlist) - else: - path = pathlist[0] + path = os.pathsep.join(pathlist) if multipath else pathlist[0] return path if appname and version: @@ -254,10 +253,7 @@ def site_config_dir(appname=None, appauthor=None, version=None, multipath=False) appname = os.path.join(appname, version) pathlist = [os.path.join(x, appname) for x in pathlist] - if multipath: - path = os.pathsep.join(pathlist) - else: - path = pathlist[0] + path = os.pathsep.join(pathlist) if multipath else pathlist[0] return path @@ -294,7 +290,11 @@ def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. This can be disabled with the `opinion=False` option. """ - if system == "win32": + if system == 'darwin': + path = os.path.expanduser('~/Library/Caches') + if appname: + path = os.path.join(path, appname) + elif system == "win32": if appauthor is None: appauthor = appname path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) @@ -303,16 +303,13 @@ def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): if not PY3 and isinstance(path, unicode): path = _win_path_to_bytes(path) if appname: - if appauthor is not False: - path = os.path.join(path, appauthor, appname) - else: - path = os.path.join(path, appname) + path = ( + os.path.join(path, appauthor, appname) + if appauthor is not False + else os.path.join(path, appname) + ) if opinion: path = os.path.join(path, "Cache") - elif system == 'darwin': - path = os.path.expanduser('~/Library/Caches') - if appname: - path = os.path.join(path, appname) else: path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) if appname: @@ -496,13 +493,7 @@ def _get_win_folder_with_pywin32(csidl_name): try: dir = unicode(dir) - # Downgrade to short path name if have highbit chars. See - # . - has_high_char = False - for c in dir: - if ord(c) > 255: - has_high_char = True - break + has_high_char = any(ord(c) > 255 for c in dir) if has_high_char: try: import win32api @@ -526,13 +517,7 @@ def _get_win_folder_with_ctypes(csidl_name): buf = ctypes.create_unicode_buffer(1024) ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) - # Downgrade to short path name if have highbit chars. See - # . 
- has_high_char = False - for c in buf: - if ord(c) > 255: - has_high_char = True - break + has_high_char = any(ord(c) > 255 for c in buf) if has_high_char: buf2 = ctypes.create_unicode_buffer(1024) if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): @@ -551,13 +536,7 @@ def _get_win_folder_with_jna(csidl_name): shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf) dir = jna.Native.toString(buf.tostring()).rstrip("\0") - # Downgrade to short path name if have highbit chars. See - # . - has_high_char = False - for c in dir: - if ord(c) > 255: - has_high_char = True - break + has_high_char = any(ord(c) > 255 for c in dir) if has_high_char: buf = array.zeros('c', buf_size) kernel = win32.Kernel32.INSTANCE @@ -610,24 +589,24 @@ def _win_path_to_bytes(path): "site_data_dir", "site_config_dir") - print("-- app dirs %s --" % __version__) + print(f"-- app dirs {__version__} --") print("-- app dirs (with optional 'version')") dirs = AppDirs(appname, appauthor, version="1.0") for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) + print(f"{prop}: {getattr(dirs, prop)}") print("\n-- app dirs (without optional 'version')") dirs = AppDirs(appname, appauthor) for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) + print(f"{prop}: {getattr(dirs, prop)}") print("\n-- app dirs (without optional 'appauthor')") dirs = AppDirs(appname) for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) + print(f"{prop}: {getattr(dirs, prop)}") print("\n-- app dirs (with disabled 'appauthor')") dirs = AppDirs(appname, appauthor=False) for prop in props: - print("%s: %s" % (prop, getattr(dirs, prop))) + print(f"{prop}: {getattr(dirs, prop)}") diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/cachecontrol/adapter.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/cachecontrol/adapter.py index 2a3b7ab..6e757c9 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/cachecontrol/adapter.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/cachecontrol/adapter.py @@ -50,9 +50,7 @@ def send(self, request, cacheable_methods=None, **kw): # check for etags and add headers if appropriate request.headers.update(self.controller.conditional_headers(request)) - resp = super(CacheControlAdapter, self).send(request, **kw) - - return resp + return super(CacheControlAdapter, self).send(request, **kw) def build_response( self, request, response, from_cache=False, cacheable_methods=None diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/cachecontrol/controller.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/cachecontrol/controller.py index bbb4a9f..eb06f88 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/cachecontrol/controller.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/cachecontrol/controller.py @@ -44,7 +44,7 @@ def _urlnorm(cls, uri): """Normalize the URL to create a safe key for the cache""" (scheme, authority, path, query, fragment) = parse_uri(uri) if not scheme or not authority: - raise Exception("Only absolute URIs are allowed. uri = %s" % uri) + raise Exception(f"Only absolute URIs are allowed. uri = {uri}") scheme = scheme.lower() authority = authority.lower() @@ -55,9 +55,7 @@ def _urlnorm(cls, uri): # Could do syntax based normalization of the URI before # computing the digest. 
See Section 6.2.2 of Std 66. request_uri = query and "?".join([path, query]) or path - defrag_uri = scheme + "://" + authority + request_uri - - return defrag_uri + return f"{scheme}://{authority}{request_uri}" @classmethod def cache_url(cls, uri): @@ -360,11 +358,11 @@ def update_cached_response(self, request, response): excluded_headers = ["content-length"] cached_response.headers.update( - dict( - (k, v) + { + k: v for k, v in response.headers.items() if k.lower() not in excluded_headers - ) + } ) # we want a 200 b/c we have content via the cache diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/cachecontrol/heuristics.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/cachecontrol/heuristics.py index 6b8d320..c3eb97b 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/cachecontrol/heuristics.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/cachecontrol/heuristics.py @@ -40,9 +40,7 @@ def update_headers(self, response): return {} def apply(self, response): - updated_headers = self.update_headers(response) - - if updated_headers: + if updated_headers := self.update_headers(response): response.headers.update(updated_headers) warning_header_value = self.warning(response) if warning_header_value is not None: diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/cachecontrol/serialize.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/cachecontrol/serialize.py index 61e6590..0ad881c 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/cachecontrol/serialize.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/cachecontrol/serialize.py @@ -45,21 +45,20 @@ def dumps(self, request, response, body=None): # doesn't know the difference. Forcing these to unicode will be # enough to have msgpack know the difference. 
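The `update_cached_response` hunk above turns a `dict(... for ...)` call into a comprehension that drops excluded header names case-insensitively. A standalone sketch with invented header values:

response_headers = {"Content-Length": "123", "ETag": "abc", "Date": "today"}
excluded_headers = ["content-length"]

# Copy every header whose lowercased name is not in the exclusion list.
merged = {
    k: v
    for k, v in response_headers.items()
    if k.lower() not in excluded_headers
}
print(merged)  # {'ETag': 'abc', 'Date': 'today'}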
data = { - u"response": { + "response": { u"body": body, - u"headers": dict( - (text_type(k), text_type(v)) for k, v in response.headers.items() - ), + u"headers": { + text_type(k): text_type(v) for k, v in response.headers.items() + }, u"status": response.status, u"version": response.version, u"reason": text_type(response.reason), u"strict": response.strict, u"decode_content": response.decode_content, - } + }, + "vary": {}, } - # Construct our vary headers - data[u"vary"] = {} if u"vary" in response_headers: varied_headers = response_headers[u"vary"].split(",") for header in varied_headers: @@ -94,7 +93,7 @@ def loads(self, request, data): # Dispatch to the actual load method for the given version try: - return getattr(self, "_loads_v{}".format(ver))(request, data) + return getattr(self, f"_loads_v{ver}")(request, data) except AttributeError: # This is a version we don't have a loads function for, so we'll @@ -161,15 +160,15 @@ def _loads_v2(self, request, data): # We need to decode the items that we've base64 encoded cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"]) - cached["response"]["headers"] = dict( - (_b64_decode_str(k), _b64_decode_str(v)) + cached["response"]["headers"] = { + _b64_decode_str(k): _b64_decode_str(v) for k, v in cached["response"]["headers"].items() - ) + } cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"]) - cached["vary"] = dict( - (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v) + cached["vary"] = { + _b64_decode_str(k): _b64_decode_str(v) if v is not None else v for k, v in cached["vary"].items() - ) + } return self.prepare_response(request, cached) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/__init__.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/__init__.py index 8dabe48..e799d16 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/__init__.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/__init__.py @@ -33,8 +33,9 @@ def detect(byte_str): """ if not isinstance(byte_str, bytearray): if not isinstance(byte_str, bytes): - raise TypeError('Expected object of type bytes or bytearray, got: ' - '{}'.format(type(byte_str))) + raise TypeError( + f'Expected object of type bytes or bytearray, got: {type(byte_str)}' + ) else: byte_str = bytearray(byte_str) detector = UniversalDetector() @@ -51,8 +52,9 @@ def detect_all(byte_str): """ if not isinstance(byte_str, bytearray): if not isinstance(byte_str, bytes): - raise TypeError('Expected object of type bytes or bytearray, got: ' - '{}'.format(type(byte_str))) + raise TypeError( + f'Expected object of type bytes or bytearray, got: {type(byte_str)}' + ) else: byte_str = bytearray(byte_str) @@ -68,16 +70,18 @@ def detect_all(byte_str): lower_charset_name = prober.charset_name.lower() # Use Windows encoding name instead of ISO-8859 if we saw any # extra Windows-specific bytes - if lower_charset_name.startswith('iso-8859'): - if detector._has_win_bytes: - charset_name = detector.ISO_WIN_MAP.get(lower_charset_name, - charset_name) + if ( + lower_charset_name.startswith('iso-8859') + and detector._has_win_bytes + ): + charset_name = detector.ISO_WIN_MAP.get(lower_charset_name, + charset_name) results.append({ 'encoding': charset_name, 'confidence': prober.get_confidence(), 'language': prober.language, }) - if len(results) > 0: + if results: return sorted(results, key=lambda result: -result['confidence']) 
return [detector.result] diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/chardistribution.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/chardistribution.py index e5509a0..abdb115 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/chardistribution.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/chardistribution.py @@ -69,17 +69,12 @@ def reset(self): def feed(self, char, char_len): """feed a character with known length""" - if char_len == 2: - # we only care about 2-bytes character in our distribution analysis - order = self.get_order(char) - else: - order = -1 + order = self.get_order(char) if char_len == 2 else -1 if order >= 0: self._total_chars += 1 # order is valid - if order < self._table_size: - if 512 > self._char_to_freq_order[order]: - self._freq_chars += 1 + if order < self._table_size and self._char_to_freq_order[order] < 512: + self._freq_chars += 1 def get_confidence(self): """return confidence based on existing data""" @@ -123,10 +118,11 @@ def get_order(self, byte_str): # second byte range: 0xa1 -- 0xfe # no validation needed here. State machine has done that first_char = byte_str[0] - if first_char >= 0xC4: - return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 - else: - return -1 + return ( + 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 + if first_char >= 0xC4 + else -1 + ) class EUCKRDistributionAnalysis(CharDistributionAnalysis): @@ -142,10 +138,11 @@ def get_order(self, byte_str): # second byte range: 0xa1 -- 0xfe # no validation needed here. State machine has done that first_char = byte_str[0] - if first_char >= 0xB0: - return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 - else: - return -1 + return ( + 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 + if first_char >= 0xB0 + else -1 + ) class GB2312DistributionAnalysis(CharDistributionAnalysis): @@ -180,13 +177,12 @@ def get_order(self, byte_str): # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe # no validation needed here. State machine has done that first_char, second_char = byte_str[0], byte_str[1] - if first_char >= 0xA4: - if second_char >= 0xA1: - return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 - else: - return 157 * (first_char - 0xA4) + second_char - 0x40 - else: + if first_char < 0xA4: return -1 + if second_char >= 0xA1: + return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 + else: + return 157 * (first_char - 0xA4) + second_char - 0x40 class SJISDistributionAnalysis(CharDistributionAnalysis): @@ -227,7 +223,4 @@ def get_order(self, byte_str): # second byte range: 0xa1 -- 0xfe # no validation needed here. 
State machine has done that char = byte_str[0] - if char >= 0xA0: - return 94 * (char - 0xA1) + byte_str[1] - 0xa1 - else: - return -1 + return 94 * (char - 0xA1) + byte_str[1] - 0xa1 if char >= 0xA0 else -1 diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/charsetgroupprober.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/charsetgroupprober.py index 0bf81d5..4089518 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/charsetgroupprober.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/charsetgroupprober.py @@ -50,17 +50,17 @@ def reset(self): def charset_name(self): if not self._best_guess_prober: self.get_confidence() - if not self._best_guess_prober: - return None - return self._best_guess_prober.charset_name + return ( + self._best_guess_prober.charset_name + if self._best_guess_prober + else None + ) @property def language(self): if not self._best_guess_prober: self.get_confidence() - if not self._best_guess_prober: - return None - return self._best_guess_prober.language + return self._best_guess_prober.language if self._best_guess_prober else None def feed(self, byte_str): for prober in self.probers: @@ -102,6 +102,4 @@ def get_confidence(self): if best_conf < conf: best_conf = conf self._best_guess_prober = prober - if not self._best_guess_prober: - return 0.0 - return best_conf + return best_conf if self._best_guess_prober else 0.0 diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/cli/chardetect.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/cli/chardetect.py index f890f93..9f2deb5 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/cli/chardetect.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/cli/chardetect.py @@ -44,10 +44,9 @@ def description_of(lines, name='stdin'): if PY2: name = name.decode(sys.getfilesystemencoding(), 'ignore') if result['encoding']: - return '{}: {} with confidence {}'.format(name, result['encoding'], - result['confidence']) + return f"{name}: {result['encoding']} with confidence {result['confidence']}" else: - return '{}: no result'.format(name) + return f'{name}: no result' def main(argv=None): @@ -67,8 +66,9 @@ def main(argv=None): (default: stdin)', type=argparse.FileType('rb'), nargs='*', default=[sys.stdin if PY2 else sys.stdin.buffer]) - parser.add_argument('--version', action='version', - version='%(prog)s {}'.format(__version__)) + parser.add_argument( + '--version', action='version', version=f'%(prog)s {__version__}' + ) args = parser.parse_args(argv) for f in args.input: diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/escprober.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/escprober.py index c52060d..fbf8ad7 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/escprober.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/escprober.py @@ -75,10 +75,7 @@ def language(self): return self._detected_language def get_confidence(self): - if self._detected_charset: - return 0.99 - else: - return 0.00 + return 0.99 if self._detected_charset else 0.00 def feed(self, byte_str): for c in byte_str: diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/eucjpprober.py 
b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/eucjpprober.py index a81ee1e..c5c76f1 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/eucjpprober.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/eucjpprober.py @@ -79,10 +79,11 @@ def feed(self, byte_str): self._last_char[0] = byte_str[-1] - if self.state == ProbingState.DETECTING: - if (self.context_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - self._state = ProbingState.FOUND_IT + if self.state == ProbingState.DETECTING and ( + self.context_analyzer.got_enough_data() + and (self.get_confidence() > self.SHORTCUT_THRESHOLD) + ): + self._state = ProbingState.FOUND_IT return self.state diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/hebrewprober.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/hebrewprober.py index 10b8122..6861001 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/hebrewprober.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/hebrewprober.py @@ -239,12 +239,10 @@ def feed(self, byte_str): # case (2) [-2:not space][-1:Non-Final letter][ # cur:space] self._final_char_visual_score += 1 - else: - # Not standing on a space - if ((self._before_prev == ' ') and + elif ((self._before_prev == ' ') and (self.is_final(self._prev)) and (cur != ' ')): - # case (3) [-2:space][-1:final letter][cur:not space] - self._final_char_visual_score += 1 + # case (3) [-2:space][-1:final letter][cur:not space] + self._final_char_visual_score += 1 self._before_prev = self._prev self._prev = cur @@ -272,12 +270,7 @@ def charset_name(self): # Still no good, back to final letter distance, maybe it'll save the # day. - if finalsub < 0.0: - return self.VISUAL_HEBREW_NAME - - # (finalsub > 0 - Logical) or (don't know what to do) default to - # Logical. - return self.LOGICAL_HEBREW_NAME + return self.VISUAL_HEBREW_NAME if finalsub < 0.0 else self.LOGICAL_HEBREW_NAME @property def language(self): diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/latin1prober.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/latin1prober.py index 7c37520..d16cec0 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/latin1prober.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/latin1prober.py @@ -137,9 +137,5 @@ def get_confidence(self): else: confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0) / total) - if confidence < 0.0: - confidence = 0.0 - # lower the confidence of latin1 so that other more accurate - # detector can take priority. 
- confidence = confidence * 0.73 - return confidence + confidence = max(confidence, 0.0) + return confidence * 0.73 diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/mbcharsetprober.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/mbcharsetprober.py index 4609154..34e288c 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/mbcharsetprober.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/mbcharsetprober.py @@ -80,10 +80,11 @@ def feed(self, byte_str): self._last_char[0] = byte_str[-1] - if self.state == ProbingState.DETECTING: - if (self.distribution_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - self._state = ProbingState.FOUND_IT + if self.state == ProbingState.DETECTING and ( + self.distribution_analyzer.got_enough_data() + and (self.get_confidence() > self.SHORTCUT_THRESHOLD) + ): + self._state = ProbingState.FOUND_IT return self.state diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/sbcharsetprober.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/sbcharsetprober.py index 186c0da..1333cf5 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/sbcharsetprober.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/sbcharsetprober.py @@ -110,27 +110,30 @@ def feed(self, byte_str): self._freq_char += 1 if self._last_order < self.SAMPLE_SIZE: self._total_seqs += 1 - if not self._reversed: - lm_cat = language_model[self._last_order][order] - else: - lm_cat = language_model[order][self._last_order] + lm_cat = ( + language_model[order][self._last_order] + if self._reversed + else language_model[self._last_order][order] + ) self._seq_counters[lm_cat] += 1 self._last_order = order charset_name = self._model.charset_name - if self.state == ProbingState.DETECTING: - if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: - confidence = self.get_confidence() - if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: - self.logger.debug('%s confidence = %s, we have a winner', - charset_name, confidence) - self._state = ProbingState.FOUND_IT - elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: - self.logger.debug('%s confidence = %s, below negative ' - 'shortcut threshhold %s', charset_name, - confidence, - self.NEGATIVE_SHORTCUT_THRESHOLD) - self._state = ProbingState.NOT_ME + if ( + self.state == ProbingState.DETECTING + and self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD + ): + confidence = self.get_confidence() + if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, we have a winner', + charset_name, confidence) + self._state = ProbingState.FOUND_IT + elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: + self.logger.debug('%s confidence = %s, below negative ' + 'shortcut threshhold %s', charset_name, + confidence, + self.NEGATIVE_SHORTCUT_THRESHOLD) + self._state = ProbingState.NOT_ME return self.state diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/sjisprober.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/sjisprober.py index 683add0..aaadc32 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/sjisprober.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/sjisprober.py @@ -79,10 +79,11 @@ 
def feed(self, byte_str): self._last_char[0] = byte_str[-1] - if self.state == ProbingState.DETECTING: - if (self.context_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - self._state = ProbingState.FOUND_IT + if self.state == ProbingState.DETECTING and ( + self.context_analyzer.got_enough_data() + and (self.get_confidence() > self.SHORTCUT_THRESHOLD) + ): + self._state = ProbingState.FOUND_IT return self.state diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/universaldetector.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/universaldetector.py index 177a415..9c415f3 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/universaldetector.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/universaldetector.py @@ -233,13 +233,11 @@ def close(self): if not self._got_data: self.logger.debug('no data received!') - # Default to ASCII if it is all we've seen so far elif self._input_state == InputState.PURE_ASCII: self.result = {'encoding': 'ascii', 'confidence': 1.0, 'language': ''} - # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD elif self._input_state == InputState.HIGH_BYTE: prober_confidence = None max_prober_confidence = 0.0 @@ -257,30 +255,34 @@ def close(self): confidence = max_prober.get_confidence() # Use Windows encoding name instead of ISO-8859 if we saw any # extra Windows-specific bytes - if lower_charset_name.startswith('iso-8859'): - if self._has_win_bytes: - charset_name = self.ISO_WIN_MAP.get(lower_charset_name, - charset_name) + if ( + lower_charset_name.startswith('iso-8859') + and self._has_win_bytes + ): + charset_name = self.ISO_WIN_MAP.get(lower_charset_name, + charset_name) self.result = {'encoding': charset_name, 'confidence': confidence, 'language': max_prober.language} # Log all prober confidences if none met MINIMUM_THRESHOLD - if self.logger.getEffectiveLevel() <= logging.DEBUG: - if self.result['encoding'] is None: - self.logger.debug('no probers hit minimum threshold') - for group_prober in self._charset_probers: - if not group_prober: - continue - if isinstance(group_prober, CharSetGroupProber): - for prober in group_prober.probers: - self.logger.debug('%s %s confidence = %s', - prober.charset_name, - prober.language, - prober.get_confidence()) - else: + if ( + self.logger.getEffectiveLevel() <= logging.DEBUG + and self.result['encoding'] is None + ): + self.logger.debug('no probers hit minimum threshold') + for group_prober in self._charset_probers: + if not group_prober: + continue + if isinstance(group_prober, CharSetGroupProber): + for prober in group_prober.probers: self.logger.debug('%s %s confidence = %s', - group_prober.charset_name, - group_prober.language, - group_prober.get_confidence()) + prober.charset_name, + prober.language, + prober.get_confidence()) + else: + self.logger.debug('%s %s confidence = %s', + group_prober.charset_name, + group_prober.language, + group_prober.get_confidence()) return self.result diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/utf8prober.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/utf8prober.py index 4573267..8aec64c 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/utf8prober.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/chardet/utf8prober.py @@ -67,16 +67,17 
@@ def feed(self, byte_str): if self.coding_sm.get_current_charlen() >= 2: self._num_mb_chars += 1 - if self.state == ProbingState.DETECTING: - if self.get_confidence() > self.SHORTCUT_THRESHOLD: - self._state = ProbingState.FOUND_IT + if ( + self.state == ProbingState.DETECTING + and self.get_confidence() > self.SHORTCUT_THRESHOLD + ): + self._state = ProbingState.FOUND_IT return self.state def get_confidence(self): unlike = 0.99 - if self._num_mb_chars < 6: - unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars - return 1.0 - unlike - else: + if self._num_mb_chars >= 6: return unlike + unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars + return 1.0 - unlike diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/colorama/ansi.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/colorama/ansi.py index d4b1353..c698d1c 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/colorama/ansi.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/colorama/ansi.py @@ -13,7 +13,7 @@ def code_to_chars(code): return CSI + str(code) + 'm' def set_title(title): - return OSC + '2;' + title + BEL + return f'{OSC}2;{title}{BEL}' def clear_screen(mode=2): return CSI + str(mode) + 'J' diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/colorama/ansitowin32.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/colorama/ansitowin32.py index cb7893e..1017cbf 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/colorama/ansitowin32.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/colorama/ansitowin32.py @@ -8,9 +8,7 @@ from .win32 import windll, winapi_test -winterm = None -if windll is not None: - winterm = WinTerm() +winterm = WinTerm() if windll is not None else None class StreamWrapper(object): @@ -42,9 +40,12 @@ def write(self, text): def isatty(self): stream = self.__wrapped - if 'PYCHARM_HOSTED' in os.environ: - if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__): - return True + if ( + 'PYCHARM_HOSTED' in os.environ + and stream is not None + and (stream is sys.__stdout__ or stream is sys.__stderr__) + ): + return True try: stream_isatty = stream.isatty except AttributeError: @@ -155,7 +156,7 @@ def get_win32_calls(self): AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True), AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True), } - return dict() + return {} def write(self, text): if self.strip or self.convert: @@ -210,7 +211,7 @@ def extract_params(self, command, paramstring): params = params + (1,) else: params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0) - if len(params) == 0: + if not params: # defaults: if command in 'JKm': params = (0,) @@ -247,12 +248,11 @@ def convert_osc(self, text): start, end = match.span() text = text[:start] + text[end:] paramstring, command = match.groups() - if command == BEL: - if paramstring.count(";") == 1: - params = paramstring.split(";") - # 0 - change title and icon (we will only change title) - # 1 - change icon (we don't support this) - # 2 - change title - if params[0] in '02': - winterm.set_title(params[1]) + if command == BEL and paramstring.count(";") == 1: + params = paramstring.split(";") + # 0 - change title and icon (we will only change title) + # 1 - change icon (we don't support this) + # 2 - change title + if params[0] in '02': + winterm.set_title(params[1]) return text diff --git 
a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/colorama/winterm.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/colorama/winterm.py index 3234116..6acf524 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/colorama/winterm.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/colorama/winterm.py @@ -77,9 +77,7 @@ def style(self, style=None, on_stderr=False): def set_console(self, attrs=None, on_stderr=False): if attrs is None: attrs = self.get_attrs() - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR + handle = win32.STDERR if on_stderr else win32.STDOUT win32.SetConsoleTextAttribute(handle, attrs) def get_position(self, handle): @@ -95,26 +93,17 @@ def set_cursor_position(self, position=None, on_stderr=False): # I'm not currently tracking the position, so there is no default. # position = self.get_position() return - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR + handle = win32.STDERR if on_stderr else win32.STDOUT win32.SetConsoleCursorPosition(handle, position) def cursor_adjust(self, x, y, on_stderr=False): - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR + handle = win32.STDERR if on_stderr else win32.STDOUT position = self.get_position(handle) adjusted_position = (position.Y + y, position.X + x) win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False) def erase_screen(self, mode=0, on_stderr=False): - # 0 should clear from the cursor to the end of the screen. - # 1 should clear from the cursor to the beginning of the screen. - # 2 should clear the entire screen, and move cursor to (1,1) - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR + handle = win32.STDERR if on_stderr else win32.STDOUT csbi = win32.GetConsoleScreenBufferInfo(handle) # get the number of character cells in the current buffer cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y @@ -141,12 +130,7 @@ def erase_screen(self, mode=0, on_stderr=False): win32.SetConsoleCursorPosition(handle, (1, 1)) def erase_line(self, mode=0, on_stderr=False): - # 0 should clear from the cursor to the end of the line. - # 1 should clear from the cursor to the beginning of the line. - # 2 should clear the entire line. 
- handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR + handle = win32.STDERR if on_stderr else win32.STDOUT csbi = win32.GetConsoleScreenBufferInfo(handle) if mode == 0: from_coord = csbi.dwCursorPosition diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/contextlib2.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/contextlib2.py index 35858c1..2dd9d72 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/contextlib2.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/contextlib2.py @@ -362,9 +362,7 @@ def _reraise_with_existing_context(exc_details): # Need to handle old-style context managers on Python 2 def _get_type(obj): obj_type = type(obj) - if obj_type is InstanceType: - return obj.__class__ # Old-style class - return obj_type # New-style class + return obj.__class__ if obj_type is InstanceType else obj_type # Inspired by discussions on http://bugs.python.org/issue13585 diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/compat.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/compat.py index 4843835..3663453 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/compat.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/compat.py @@ -56,9 +56,7 @@ def splituser(host): import re _userprog = re.compile('^(.*)@(.*)$') - match = _userprog.match(host) - if match: return match.group(1, 2) - return None, host + return match.group(1, 2) if (match := _userprog.match(host)) else (None, host) else: # pragma: no cover from io import StringIO @@ -114,7 +112,8 @@ def _dnsname_match(dn, hostname, max_wildcards=1): # policy among SSL implementations showed it to be a # reasonable choice. raise CertificateError( - "too many wildcards in certificate DNS name: " + repr(dn)) + f"too many wildcards in certificate DNS name: {repr(dn)}" + ) # speed up common case w/o wildcards if not wildcards: @@ -138,9 +137,7 @@ def _dnsname_match(dn, hostname, max_wildcards=1): pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) # add the remaining fragments, ignore any wildcards - for frag in remainder: - pats.append(re.escape(frag)) - + pats.extend(re.escape(frag) for frag in remainder) pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) return pat.match(hostname) @@ -224,10 +221,7 @@ def _access_check(fn, mode): # than referring to PATH directories. This includes checking relative to the # current directory, e.g. ./script if os.path.dirname(cmd): - if _access_check(cmd, mode): - return cmd - return None - + return cmd if _access_check(cmd, mode) else None if path is None: path = os.environ.get("PATH", os.defpath) if not path: @@ -236,7 +230,7 @@ def _access_check(fn, mode): if sys.platform == "win32": # The current directory takes precedence on Windows. - if not os.curdir in path: + if os.curdir not in path: path.insert(0, os.curdir) # PATHEXT is necessary to check on Windows. 
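Editor's note on the `compat.py` hunk above: it swaps `not os.curdir in path` for the more idiomatic `os.curdir not in path`; behaviour is unchanged, only the spelling of the membership test improves. A trivial standalone sketch of the pattern, using a made-up search path:

import os

# Made-up search path, for illustration only.
search_path = ['/usr/local/bin', '/usr/bin']

# Preferred spelling: "x not in y" reads as a single operator.
if os.curdir not in search_path:
    search_path.insert(0, os.curdir)

# "if not os.curdir in search_path:" behaves identically, but most
# linters (e.g. pycodestyle E713) flag it as the less readable form.
print(search_path)  # ['.', '/usr/local/bin', '/usr/bin']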
@@ -257,7 +251,7 @@ def _access_check(fn, mode): seen = set() for dir in path: normdir = os.path.normcase(dir) - if not normdir in seen: + if normdir not in seen: seen.add(normdir) for thefile in files: name = os.path.join(dir, thefile) @@ -307,9 +301,7 @@ def python_implementation(): return 'PyPy' if os.name == 'java': return 'Jython' - if sys.version.startswith('IronPython'): - return 'IronPython' - return 'CPython' + return 'IronPython' if sys.version.startswith('IronPython') else 'CPython' try: import sysconfig @@ -347,8 +339,7 @@ def fsencode(filename): elif isinstance(filename, text_type): return filename.encode(_fsencoding, _fserrors) else: - raise TypeError("expect bytes or str, not %s" % - type(filename).__name__) + raise TypeError(f"expect bytes or str, not {type(filename).__name__}") def fsdecode(filename): if isinstance(filename, text_type): @@ -356,8 +347,7 @@ def fsdecode(filename): elif isinstance(filename, bytes): return filename.decode(_fsencoding, _fserrors) else: - raise TypeError("expect bytes or str, not %s" % - type(filename).__name__) + raise TypeError(f"expect bytes or str, not {type(filename).__name__}") try: from tokenize import detect_encoding @@ -429,7 +419,7 @@ def find_cookie(line): except LookupError: # This behaviour mimics the Python interpreter if filename is None: - msg = "unknown encoding: " + encoding + msg = f"unknown encoding: {encoding}" else: msg = "unknown encoding for {!r}: {}".format(filename, encoding) @@ -463,10 +453,7 @@ def find_cookie(line): return default, [first] encoding = find_cookie(second) - if encoding: - return encoding, [first, second] - - return default, [first, second] + return (encoding, [first, second]) if encoding else (default, [first, second]) # For converting & <-> & etc. try: @@ -623,10 +610,7 @@ def cache_from_source(path, debug_override=None): assert path.endswith('.py') if debug_override is None: debug_override = __debug__ - if debug_override: - suffix = 'c' - else: - suffix = 'o' + suffix = 'c' if debug_override else 'o' return path + suffix try: @@ -787,10 +771,7 @@ def update(*args, **kwds): elif not args: raise TypeError('update() takes at least 1 argument (0 given)') self = args[0] - # Make progressively weaker assumptions about "other" - other = () - if len(args) == 2: - other = args[1] + other = args[1] if len(args) == 2 else () if isinstance(other, dict): for key in other: self[key] = other[key] @@ -835,9 +816,11 @@ def __repr__(self, _repr_running=None): return '...' 
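Editor's note on the `cache_from_source` hunk above: it collapses the four-line if/else into a conditional expression. Shown here as a standalone, runnable sketch of the refactored helper, lifted from the hunk with an illustrative call added:

def cache_from_source(path, debug_override=None):
    # Same logic as the refactored vendored helper: pick the legacy
    # bytecode suffix with a conditional expression.
    assert path.endswith('.py')
    if debug_override is None:
        debug_override = __debug__
    suffix = 'c' if debug_override else 'o'
    return path + suffix

print(cache_from_source('pkg/module.py', debug_override=True))   # pkg/module.pyc
print(cache_from_source('pkg/module.py', debug_override=False))  # pkg/module.pyo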
_repr_running[call_key] = 1 try: - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, self.items()) + return ( + '%s(%r)' % (self.__class__.__name__, self.items()) + if self + else f'{self.__class__.__name__}()' + ) finally: del _repr_running[call_key] @@ -899,10 +882,10 @@ def viewitems(self): def valid_ident(s): - m = IDENTIFIER.match(s) - if not m: + if m := IDENTIFIER.match(s): + return True + else: raise ValueError('Not a valid Python identifier: %r' % s) - return True # The ConvertingXXX classes are wrappers around standard Python containers, @@ -944,11 +927,13 @@ def get(self, key, default=None): def pop(self, key, default=None): value = dict.pop(self, key, default) result = self.configurator.convert(value) - if value is not result: - if type(result) in (ConvertingDict, ConvertingList, - ConvertingTuple): - result.parent = self - result.key = key + if value is not result and type(result) in ( + ConvertingDict, + ConvertingList, + ConvertingTuple, + ): + result.parent = self + result.key = key return result class ConvertingList(list): @@ -968,10 +953,12 @@ def __getitem__(self, key): def pop(self, idx=-1): value = list.pop(self, idx) result = self.configurator.convert(value) - if value is not result: - if type(result) in (ConvertingDict, ConvertingList, - ConvertingTuple): - result.parent = self + if value is not result and type(result) in ( + ConvertingDict, + ConvertingList, + ConvertingTuple, + ): + result.parent = self return result class ConvertingTuple(tuple): @@ -979,11 +966,13 @@ class ConvertingTuple(tuple): def __getitem__(self, key): value = tuple.__getitem__(self, key) result = self.configurator.convert(value) - if value is not result: - if type(result) in (ConvertingDict, ConvertingList, - ConvertingTuple): - result.parent = self - result.key = key + if value is not result and type(result) in ( + ConvertingDict, + ConvertingList, + ConvertingTuple, + ): + result.parent = self + result.key = key return result class BaseConfigurator(object): @@ -1020,7 +1009,7 @@ def resolve(self, s): try: found = self.importer(used) for frag in name: - used += '.' 
+ frag + used += f'.{frag}' try: found = getattr(found, frag) except AttributeError: @@ -1043,31 +1032,30 @@ def cfg_convert(self, value): m = self.WORD_PATTERN.match(rest) if m is None: raise ValueError("Unable to convert %r" % value) - else: - rest = rest[m.end():] - d = self.config[m.groups()[0]] + rest = rest[m.end():] + d = self.config[m.groups()[0]] #print d, rest - while rest: - m = self.DOT_PATTERN.match(rest) + while rest: + m = self.DOT_PATTERN.match(rest) + if m: + d = d[m.groups()[0]] + else: + m = self.INDEX_PATTERN.match(rest) if m: - d = d[m.groups()[0]] - else: - m = self.INDEX_PATTERN.match(rest) - if m: - idx = m.groups()[0] - if not self.DIGIT_PATTERN.match(idx): + idx = m.groups()[0] + if self.DIGIT_PATTERN.match(idx): + try: + n = int(idx) # try as number first (most likely) + d = d[n] + except TypeError: d = d[idx] - else: - try: - n = int(idx) # try as number first (most likely) - d = d[n] - except TypeError: - d = d[idx] - if m: - rest = rest[m.end():] - else: - raise ValueError('Unable to convert ' - '%r at %r' % (value, rest)) + else: + d = d[idx] + if m: + rest = rest[m.end():] + else: + raise ValueError('Unable to convert ' + '%r at %r' % (value, rest)) #rest should be empty return d @@ -1084,16 +1072,14 @@ def convert(self, value): value = ConvertingList(value) value.configurator = self elif not isinstance(value, ConvertingTuple) and\ - isinstance(value, tuple): + isinstance(value, tuple): value = ConvertingTuple(value) value.configurator = self elif isinstance(value, string_types): - m = self.CONVERT_PATTERN.match(value) - if m: + if m := self.CONVERT_PATTERN.match(value): d = m.groupdict() prefix = d['prefix'] - converter = self.value_converters.get(prefix, None) - if converter: + if converter := self.value_converters.get(prefix, None): suffix = d['suffix'] converter = getattr(self, converter) value = converter(suffix) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/database.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/database.py index 0b2cf32..b7e3726 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/database.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/database.py @@ -170,10 +170,10 @@ def _generate_cache(self): else: self._cache_egg.add(dist) - if gen_dist: - self._cache.generated = True - if gen_egg: - self._cache_egg.generated = True + if gen_dist: + self._cache.generated = True + if gen_egg: + self._cache_egg.generated = True @classmethod def distinfo_dirname(cls, name, version): @@ -207,17 +207,13 @@ def get_distributions(self): :class:`EggInfoDistribution` instances """ if not self._cache_enabled: - for dist in self._yield_distributions(): - yield dist + yield from self._yield_distributions() else: self._generate_cache() - for dist in self._cache.path.values(): - yield dist - + yield from self._cache.path.values() if self._include_egg: - for dist in self._cache_egg.path.values(): - yield dist + yield from self._cache_egg.path.values() def get_distribution(self, name): """ @@ -262,7 +258,7 @@ def provides_distribution(self, name, version=None): matcher = None if version is not None: try: - matcher = self._scheme.matcher('%s (%s)' % (name, version)) + matcher = self._scheme.matcher(f'{name} ({version})') except ValueError: raise DistlibException('invalid name or version: %r, %r' % (name, version)) @@ -281,10 +277,9 @@ def provides_distribution(self, name, version=None): if p_name == name: 
yield dist break - else: - if p_name == name and matcher.match(p_ver): - yield dist - break + elif p_name == name and matcher.match(p_ver): + yield dist + break def get_file_path(self, name, relative_path): """ @@ -306,12 +301,10 @@ def get_exported_entries(self, category, name=None): r = dist.exports if category in r: d = r[category] - if name is not None: - if name in d: - yield d[name] - else: - for v in d.values(): - yield v + if name is None: + yield from d.values() + elif name in d: + yield d[name] class Distribution(object): @@ -363,7 +356,7 @@ def name_and_version(self): """ A utility property which displays the name and version in parentheses. """ - return '%s (%s)' % (self.name, self.version) + return f'{self.name} ({self.version})' @property def provides(self): @@ -372,7 +365,7 @@ def provides(self): :return: A set of "name (version)" strings. """ plist = self.metadata.provides - s = '%s (%s)' % (self.name, self.version) + s = f'{self.name} ({self.version})' if s not in plist: plist.append(s) return plist @@ -431,22 +424,17 @@ def matches_requirement(self, req): p_name, p_ver = parse_name_and_version(p) if p_name != name: continue - try: + with contextlib.suppress(UnsupportedVersionError): result = matcher.match(p_ver) break - except UnsupportedVersionError: - pass return result def __repr__(self): """ Return a textual representation of this instance, """ - if self.source_url: - suffix = ' [%s]' % self.source_url - else: - suffix = '' - return '' % (self.name, self.version, suffix) + suffix = f' [{self.source_url}]' if self.source_url else '' + return f'' def __eq__(self, other): """ @@ -456,13 +444,15 @@ def __eq__(self, other): version and source_url. :return: True if it is the same, else False. """ - if type(other) is not type(self): - result = False - else: - result = (self.name == other.name and - self.version == other.version and - self.source_url == other.source_url) - return result + return ( + False + if type(other) is not type(self) + else ( + self.name == other.name + and self.version == other.version + and self.source_url == other.source_url + ) + ) def __hash__(self): """ @@ -520,10 +510,10 @@ def get_hash(self, data, hasher=None): prefix = '' else: hasher = getattr(hashlib, hasher) - prefix = '%s=' % self.hasher + prefix = f'{self.hasher}=' digest = hasher(data).digest() digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii') - return '%s%s' % (prefix, digest) + return f'{prefix}{digest}' class InstalledDistribution(BaseInstalledDistribution): @@ -540,7 +530,7 @@ def __init__(self, path, metadata=None, env=None): self.modules = [] self.finder = finder = resources.finder_for_path(path) if finder is None: - raise ValueError('finder unavailable for %s' % path) + raise ValueError(f'finder unavailable for {path}') if env and env._cache_enabled and path in env._cache.path: metadata = env._cache.path[path].metadata elif metadata is None: @@ -552,8 +542,7 @@ def __init__(self, path, metadata=None, env=None): if r is None: r = finder.find(LEGACY_METADATA_FILENAME) if r is None: - raise ValueError('no %s found in %s' % (METADATA_FILENAME, - path)) + raise ValueError(f'no {METADATA_FILENAME} found in {path}') with contextlib.closing(r.as_stream()) as stream: metadata = Metadata(fileobj=stream, scheme='legacy') @@ -575,7 +564,7 @@ def __repr__(self): self.name, self.version, self.path) def __str__(self): - return "%s %s" % (self.name, self.version) + return f"{self.name} {self.version}" def _get_records(self): """ @@ -592,7 +581,7 @@ def _get_records(self): 
#base_location = os.path.dirname(self.path) #base_location = os.path.abspath(base_location) for row in record_reader: - missing = [None for i in range(len(row), 3)] + missing = [None for _ in range(len(row), 3)] path, checksum, size = row + missing #if not os.path.isabs(path): # path = path.replace('/', os.sep) @@ -608,11 +597,11 @@ def exports(self): of :class:`ExportEntry` instances describing the individual export entries, and keyed by name. """ - result = {} - r = self.get_distinfo_resource(EXPORTS_FILENAME) - if r: - result = self.read_exports() - return result + return ( + self.read_exports() + if (r := self.get_distinfo_resource(EXPORTS_FILENAME)) + else {} + ) def read_exports(self): """ @@ -623,8 +612,7 @@ def read_exports(self): export entries. """ result = {} - r = self.get_distinfo_resource(EXPORTS_FILENAME) - if r: + if r := self.get_distinfo_resource(EXPORTS_FILENAME): with contextlib.closing(r.as_stream()) as stream: result = read_exports(stream) return result @@ -667,8 +655,7 @@ def list_installed_files(self): :returns: iterator of (path, hash, size) """ - for result in self._get_records(): - yield result + yield from self._get_records() def write_installed_files(self, paths, prefix, dry_run=False): """ @@ -729,11 +716,7 @@ def check_installed_files(self): if size and actual_size != size: mismatches.append((path, 'size', size, actual_size)) elif hash_value: - if '=' in hash_value: - hasher = hash_value.split('=', 1)[0] - else: - hasher = None - + hasher = hash_value.split('=', 1)[0] if '=' in hash_value else None with open(path, 'rb') as f: actual_hash = self.get_hash(f.read(), hasher) if actual_hash != hash_value: @@ -784,10 +767,8 @@ def write_shared_locations(self, paths, dry_run=False): for key in ('prefix', 'lib', 'headers', 'scripts', 'data'): path = paths[key] if os.path.isdir(paths[key]): - lines.append('%s=%s' % (key, path)) - for ns in paths.get('namespace', ()): - lines.append('namespace=%s' % ns) - + lines.append(f'{key}={path}') + lines.extend(f'namespace={ns}' for ns in paths.get('namespace', ())) with codecs.open(shared_path, 'w', encoding='utf-8') as f: f.write('\n'.join(lines)) return shared_path @@ -798,7 +779,7 @@ def get_distinfo_resource(self, path): '%r at %r' % (path, self.path)) finder = resources.finder_for_path(self.path) if finder is None: - raise DistlibException('Unable to get a finder for %s' % self.path) + raise DistlibException(f'Unable to get a finder for {self.path}') return finder.find(path) def get_distinfo_file(self, path): @@ -911,7 +892,7 @@ def parse_requires_data(data): reqs.append(r.name) else: cons = ', '.join('%s%s' % c for c in r.constraints) - reqs.append('%s (%s)' % (r.name, cons)) + reqs.append(f'{r.name} ({cons})') return reqs def parse_requires_path(req_path): @@ -921,11 +902,9 @@ def parse_requires_path(req_path): """ reqs = [] - try: + with contextlib.suppress(IOError): with codecs.open(req_path, 'r', 'utf-8') as fp: reqs = parse_requires_data(fp.read()) - except IOError: - pass return reqs tl_path = tl_data = None @@ -963,14 +942,10 @@ def parse_requires_path(req_path): if requires: metadata.add_requirements(requires) # look for top-level modules in top_level.txt, if present - if tl_data is None: - if tl_path is not None and os.path.exists(tl_path): - with open(tl_path, 'rb') as f: - tl_data = f.read().decode('utf-8') - if not tl_data: - tl_data = [] - else: - tl_data = tl_data.splitlines() + if tl_data is None and tl_path is not None and os.path.exists(tl_path): + with open(tl_path, 'rb') as f: + tl_data = 
f.read().decode('utf-8') + tl_data = tl_data.splitlines() if tl_data else [] self.modules = tl_data return metadata @@ -979,7 +954,7 @@ def __repr__(self): self.name, self.version, self.path) def __str__(self): - return "%s %s" % (self.name, self.version) + return f"{self.name} {self.version}" def check_installed_files(self): """ @@ -1136,7 +1111,7 @@ def add_missing(self, distribution, requirement): self.missing.setdefault(distribution, []).append(requirement) def _repr_dist(self, dist): - return '%s %s' % (dist.name, dist.version) + return f'{dist.name} {dist.version}' def repr_node(self, dist, level=1): """Prints only a subgraph""" @@ -1144,7 +1119,7 @@ def repr_node(self, dist, level=1): for other, label in self.adjacency_list[dist]: dist = self._repr_dist(other) if label is not None: - dist = '%s [%s]' % (dist, label) + dist = f'{dist} [{label}]' output.append(' ' * level + str(dist)) suboutput = self.repr_node(other, level + 1) subs = suboutput.split('\n') @@ -1167,18 +1142,18 @@ def to_dot(self, f, skip_disconnected=True): if len(adjs) == 0 and not skip_disconnected: disconnected.append(dist) for other, label in adjs: - if not label is None: + if label is not None: f.write('"%s" -> "%s" [label="%s"]\n' % (dist.name, other.name, label)) else: f.write('"%s" -> "%s"\n' % (dist.name, other.name)) - if not skip_disconnected and len(disconnected) > 0: + if not skip_disconnected and disconnected: f.write('subgraph disconnected {\n') f.write('label = "Disconnected"\n') f.write('bgcolor = red\n') for dist in disconnected: - f.write('"%s"' % dist.name) + f.write(f'"{dist.name}"') f.write('\n') f.write('}\n') f.write('}\n') @@ -1193,9 +1168,7 @@ def topological_sort(self): """ result = [] # Make a shallow copy of the adjacency list - alist = {} - for k, v in self.adjacency_list.items(): - alist[k] = v[:] + alist = {k: v[:] for k, v in self.adjacency_list.items()} while True: # See what we can remove in this run to_remove = [] @@ -1209,16 +1182,16 @@ def topological_sort(self): # Remove from the adjacency list of others for k, v in alist.items(): alist[k] = [(d, r) for d, r in v if d not in to_remove] - logger.debug('Moving to result: %s', - ['%s (%s)' % (d.name, d.version) for d in to_remove]) + logger.debug( + 'Moving to result: %s', + [f'{d.name} ({d.version})' for d in to_remove], + ) result.extend(to_remove) return result, list(alist.keys()) def __repr__(self): """Representation of the graph""" - output = [] - for dist, adjs in self.adjacency_list.items(): - output.append(self.repr_node(dist)) + output = [self.repr_node(dist) for dist, adjs in self.adjacency_list.items()] return '\n'.join(output) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/index.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/index.py index 9a18ffc..a7ec6c9 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/index.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/index.py @@ -44,7 +44,7 @@ def __init__(self, url=None): self.read_configuration() scheme, netloc, path, params, query, frag = urlparse(self.url) if params or query or frag or scheme not in ('http', 'https'): - raise DistlibException('invalid repository: %s' % self.url) + raise DistlibException(f'invalid repository: {self.url}') self.password_handler = None self.ssl_verifier = None self.gpg = None @@ -146,7 +146,7 @@ def _reader(self, name, stream, outbuf): break s = s.decode('utf-8').rstrip() 
outbuf.append(s) - logger.debug('%s: %s' % (name, s)) + logger.debug(f'{name}: {s}') stream.close() def get_sign_command(self, filename, signer, sign_password, @@ -172,7 +172,7 @@ def get_sign_command(self, filename, signer, sign_password, if sign_password is not None: cmd.extend(['--batch', '--passphrase-fd', '0']) td = tempfile.mkdtemp() - sf = os.path.join(td, os.path.basename(filename) + '.asc') + sf = os.path.join(td, f'{os.path.basename(filename)}.asc') cmd.extend(['--detach-sign', '--armor', '--local-user', signer, '--output', sf, filename]) logger.debug('invoking: %s', ' '.join(cmd)) @@ -261,7 +261,7 @@ def upload_file(self, metadata, filename, signer=None, sign_password=None, """ self.check_credentials() if not os.path.exists(filename): - raise DistlibException('not found: %s' % filename) + raise DistlibException(f'not found: {filename}') metadata.validate() d = metadata.todict() sig_file = None @@ -401,7 +401,7 @@ def download_file(self, url, destfile, digest=None, reporthook=None): else: hasher = 'md5' digester = getattr(hashlib, hasher)() - logger.debug('Digest specified: %s' % digest) + logger.debug(f'Digest specified: {digest}') # The following code is equivalent to urlretrieve. # We need to do it this way so that we can compute the # digest of the file as we go. @@ -412,11 +412,9 @@ def download_file(self, url, destfile, digest=None, reporthook=None): try: headers = sfp.info() blocksize = 8192 - size = -1 read = 0 blocknum = 0 - if "content-length" in headers: - size = int(headers["Content-Length"]) + size = int(headers["Content-Length"]) if "content-length" in headers else -1 if reporthook: reporthook(blocknum, blocksize, size) while True: @@ -482,19 +480,27 @@ def encode_request(self, fields, files): values = [values] for v in values: - parts.extend(( + parts.extend( + ( + b'--' + boundary, + f'Content-Disposition: form-data; name="{k}"'.encode( + 'utf-8' + ), + b'', + v.encode('utf-8'), + ) + ) + for key, filename, value in files: + parts.extend( + ( b'--' + boundary, - ('Content-Disposition: form-data; name="%s"' % - k).encode('utf-8'), + f'Content-Disposition: form-data; name="{key}"; filename="{filename}"'.encode( + 'utf-8' + ), b'', - v.encode('utf-8'))) - for key, filename, value in files: - parts.extend(( - b'--' + boundary, - ('Content-Disposition: form-data; name="%s"; filename="%s"' % - (key, filename)).encode('utf-8'), - b'', - value)) + value, + ) + ) parts.extend((b'--' + boundary + b'--', b'')) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/locators.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/locators.py index e10a564..e573abc 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/locators.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/locators.py @@ -245,10 +245,7 @@ def same_project(name1, name2): logger.debug('%s: version hint in fragment: %r', project_name, frag) m = HASHER_HASH.match(frag) - if m: - algo, digest = m.groups() - else: - algo, digest = None, None + algo, digest = m.groups() if m else (None, None) origpath = path if path and path[-1] == '/': # pragma: no cover path = path[:-1] @@ -299,7 +296,7 @@ def same_project(name1, name2): result['python-version'] = pyver break if result and algo: - result['%s_digest' % algo] = digest + result[f'{algo}_digest'] = digest return result def _get_digest(self, info): @@ -319,7 +316,7 @@ def _get_digest(self, info): break if not result: for algo in 
('sha256', 'md5'): - key = '%s_digest' % algo + key = f'{algo}_digest' if key in info: result = (algo, info[key]) break @@ -335,14 +332,13 @@ def _update_version_data(self, result, info): version = info.pop('version') if version in result: dist = result[version] - md = dist.metadata else: dist = make_dist(name, version, scheme=self.scheme) - md = dist.metadata + md = dist.metadata dist.digest = digest = self._get_digest(info) url = info['url'] result['digests'][url] = digest - if md.source_url != info['url']: + if md.source_url != url: md.source_url = self.prefer_url(md.source_url, url) result['urls'].setdefault(version, set()).add(url) dist.locator = self @@ -379,15 +375,13 @@ def locate(self, requirement, prereleases=False): try: if not matcher.match(k): logger.debug('%s did not match %r', matcher, k) + elif prereleases or not vcls(k).is_prerelease: + slist.append(k) else: - if prereleases or not vcls(k).is_prerelease: - slist.append(k) - else: - logger.debug('skipping pre-release ' - 'version %s of %s', k, matcher.name) + logger.debug('skipping pre-release ' + 'version %s of %s', k, matcher.name) except Exception: # pragma: no cover logger.warning('error matching %s with %r', matcher, k) - pass # slist.append(k) if len(slist) > 1: slist = sorted(slist, key=scheme.key) if slist: @@ -398,11 +392,8 @@ def locate(self, requirement, prereleases=False): if r.extras: result.extras = r.extras result.download_urls = versions.get('urls', {}).get(version, set()) - d = {} sd = versions.get('digests', {}) - for url in result.download_urls: - if url in sd: # pragma: no cover - d[url] = sd[url] + d = {url: sd[url] for url in result.download_urls if url in sd} result.digests = d self.matcher = None return result @@ -473,7 +464,7 @@ def get_distribution_names(self): def _get_project(self, name): result = {'urls': {}, 'digests': {}} - url = urljoin(self.base_url, '%s/json' % quote(name)) + url = urljoin(self.base_url, f'{quote(name)}/json') try: resp = self.opener.open(url) data = resp.read().decode() # for now @@ -489,7 +480,7 @@ def _get_project(self, name): dist.locator = self urls = d['urls'] result[md.version] = dist - for info in d['urls']: + for info in urls: url = info['url'] dist.download_urls.add(url) dist.digests[url] = self._get_digest(info) @@ -511,14 +502,6 @@ def _get_project(self, name): odist.digests[url] = self._get_digest(info) result['urls'].setdefault(version, set()).add(url) result['digests'][url] = self._get_digest(info) -# for info in urls: -# md.source_url = info['url'] -# dist.digest = self._get_digest(info) -# dist.locator = self -# for info in urls: -# url = info['url'] -# result['urls'].setdefault(md.version, set()).add(url) -# result['digests'][url] = self._get_digest(info) except Exception as e: self.errors.put(text_type(e)) logger.exception('JSON fetch failed: %s', e) @@ -548,8 +531,7 @@ def __init__(self, data, url): """ self.data = data self.base_url = self.url = url - m = self._base.search(self.data) - if m: + if m := self._base.search(self.data): self.base_url = m.group(1) _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) @@ -631,7 +613,7 @@ def _prepare_threads(self): fetching web pages). 
""" self._threads = [] - for i in range(self.num_workers): + for _ in range(self.num_workers): t = threading.Thread(target=self._fetch) t.setDaemon(True) t.start() @@ -644,7 +626,7 @@ def _wait_threads(self): """ # Note that you need two loops, since you can't say which # thread will get each sentinel - for t in self._threads: + for _ in self._threads: self._to_fetch.put(None) # sentinel for t in self._threads: t.join() @@ -655,7 +637,7 @@ def _get_project(self, name): with self._gplock: self.result = result self.project_name = name - url = urljoin(self.base_url, '%s/' % quote(name)) + url = urljoin(self.base_url, f'{quote(name)}/') self._seen.clear() self._page_cache.clear() self._prepare_threads() @@ -718,10 +700,7 @@ def _should_queue(self, link, referrer, rel): result = False else: host = netloc.split(':', 1)[0] - if host.lower() == 'localhost': - result = False - else: - result = True + result = host.lower() != 'localhost' logger.debug('should_queue: %s (%s) from %s -> %s', link, rel, referrer, result) return result @@ -795,10 +774,7 @@ def get_page(self, url): if encoding: decoder = self.decoders[encoding] # fail if not found data = decoder(data) - encoding = 'utf-8' - m = CHARSET.search(content_type) - if m: - encoding = m.group(1) + encoding = m.group(1) if (m := CHARSET.search(content_type)) else 'utf-8' try: data = data.decode(encoding) except UnicodeError: # pragma: no cover @@ -824,13 +800,10 @@ def get_distribution_names(self): """ Return all the distribution names known to this locator. """ - result = set() - page = self.get_page(self.base_url) - if not page: - raise DistlibException('Unable to get %s' % self.base_url) - for match in self._distname_re.finditer(page.data): - result.add(match.group(1)) - return result + if page := self.get_page(self.base_url): + return {match.group(1) for match in self._distname_re.finditer(page.data)} + else: + raise DistlibException(f'Unable to get {self.base_url}') class DirectoryLocator(Locator): """ @@ -871,8 +844,7 @@ def _get_project(self, name): url = urlunparse(('file', '', pathname2url(os.path.abspath(fn)), '', '', '')) - info = self.convert_url_to_download_info(url, name) - if info: + if info := self.convert_url_to_download_info(url, name): self._update_version_data(result, info) if not self.recursive: break @@ -890,8 +862,7 @@ def get_distribution_names(self): url = urlunparse(('file', '', pathname2url(os.path.abspath(fn)), '', '', '')) - info = self.convert_url_to_download_info(url, None) - if info: + if info := self.convert_url_to_download_info(url, None): result.add(info['name']) if not self.recursive: break @@ -912,8 +883,7 @@ def get_distribution_names(self): def _get_project(self, name): result = {'urls': {}, 'digests': {}} - data = get_project_data(name) - if data: + if data := get_project_data(name): for info in data.get('files', []): if info['ptype'] != 'sdist' or info['pyversion'] != 'source': continue @@ -952,15 +922,15 @@ def __init__(self, distpath, **kwargs): def _get_project(self, name): dist = self.distpath.get_distribution(name) - if dist is None: - result = {'urls': {}, 'digests': {}} - else: - result = { + return ( + {'urls': {}, 'digests': {}} + if dist is None + else { dist.version: dist, - 'urls': {dist.version: set([dist.source_url])}, - 'digests': {dist.version: set([None])} + 'urls': {dist.version: {dist.source_url}}, + 'digests': {dist.version: {None}}, } - return result + ) class AggregatingLocator(Locator): @@ -998,8 +968,7 @@ def _set_scheme(self, value): def _get_project(self, name): result = {} for 
locator in self.locators: - d = locator.get_project(name) - if d: + if d := locator.get_project(name): if self.merge: files = result.get('urls', {}) digests = result.get('digests', {}) @@ -1026,14 +995,7 @@ def _get_project(self, name): # failure to find foo (>= 2.0), because other locators # weren't searched. Note that this only matters when # merge=False. - if self.matcher is None: - found = True - else: - found = False - for k in d: - if self.matcher.match(k): - found = True - break + found = True if self.matcher is None else any(self.matcher.match(k) for k in d) if found: result = d break @@ -1180,7 +1142,7 @@ def try_to_replace(self, provider, other, problems): # can't replace other with provider problems.add(('cantreplace', provider, other, frozenset(unmatched))) - result = False + return False else: # can replace other with provider self.remove_distribution(other) @@ -1188,8 +1150,7 @@ def try_to_replace(self, provider, other, problems): for s in rlist: self.reqts.setdefault(provider, set()).add(s) self.add_distribution(provider) - result = True - return result + return True def find(self, requirement, meta_extras=None, prereleases=False): """ @@ -1226,7 +1187,7 @@ def find(self, requirement, meta_extras=None, prereleases=False): if ':*:' in meta_extras: meta_extras.remove(':*:') # :meta: and :run: are implicitly included - meta_extras |= set([':test:', ':build:', ':dev:']) + meta_extras |= {':test:', ':build:', ':dev:'} if isinstance(requirement, Distribution): dist = odist = requirement @@ -1239,8 +1200,8 @@ def find(self, requirement, meta_extras=None, prereleases=False): logger.debug('located %s', odist) dist.requested = True problems = set() - todo = set([dist]) - install_dists = set([odist]) + todo = {dist} + install_dists = {odist} while todo: dist = todo.pop() name = dist.key # case-insensitive @@ -1257,9 +1218,9 @@ def find(self, requirement, meta_extras=None, prereleases=False): ereqts = set() if meta_extras and dist in install_dists: for key in ('test', 'build', 'dev'): - e = ':%s:' % key + e = f':{key}:' if e in meta_extras: - ereqts |= getattr(dist, '%s_requires' % key) + ereqts |= getattr(dist, f'{key}_requires') all_reqts = ireqts | sreqts | ereqts for r in all_reqts: providers = self.find_providers(r) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/manifest.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/manifest.py index 92688d0..d0ba730 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/manifest.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/manifest.py @@ -324,11 +324,7 @@ def _translate_pattern(self, pattern, anchor=True, prefix=None, or just returned as-is (assumes it's a regex object). 
""" if is_regex: - if isinstance(pattern, str): - return re.compile(pattern) - else: - return pattern - + return re.compile(pattern) if isinstance(pattern, str) else pattern if _PYTHON_VERSION > (3, 2): # ditch start and end characters start, _, end = self._glob_to_re('_').partition('_') @@ -354,19 +350,16 @@ def _translate_pattern(self, pattern, anchor=True, prefix=None, if os.sep == '\\': sep = r'\\' if _PYTHON_VERSION <= (3, 2): - pattern_re = '^' + base + sep.join((prefix_re, - '.*' + pattern_re)) + pattern_re = f"^{base}{sep.join((prefix_re, f'.*{pattern_re}'))}" else: pattern_re = pattern_re[len(start): len(pattern_re) - len(end)] - pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep, - pattern_re, end) - else: # no prefix -- respect anchor flag - if anchor: - if _PYTHON_VERSION <= (3, 2): - pattern_re = '^' + base + pattern_re - else: - pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):]) - + pattern_re = f'{start}{base}{prefix_re}{sep}.*{pattern_re}{end}' + elif anchor: + pattern_re = ( + f'^{base}{pattern_re}' + if _PYTHON_VERSION <= (3, 2) + else f'{start}{base}{pattern_re[len(start):]}' + ) return re.compile(pattern_re) def _glob_to_re(self, pattern): diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/markers.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/markers.py index 82fcfb8..03a48a6 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/markers.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/markers.py @@ -23,9 +23,7 @@ __all__ = ['interpret'] def _is_literal(o): - if not isinstance(o, string_types) or not o: - return False - return o[0] in '\'"' + return o[0] in '\'"' if isinstance(o, string_types) and o else False class Evaluator(object): """ @@ -55,19 +53,19 @@ def evaluate(self, expr, context): if isinstance(expr, string_types): if expr[0] in '\'"': result = expr[1:-1] - else: - if expr not in context: - raise SyntaxError('unknown variable: %s' % expr) + elif expr in context: result = context[expr] + else: + raise SyntaxError(f'unknown variable: {expr}') else: assert isinstance(expr, dict) op = expr['op'] if op not in self.operations: - raise NotImplementedError('op not implemented: %s' % op) + raise NotImplementedError(f'op not implemented: {op}') elhs = expr['lhs'] erhs = expr['rhs'] - if _is_literal(expr['lhs']) and _is_literal(expr['rhs']): - raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs)) + if _is_literal(elhs) and _is_literal(erhs): + raise SyntaxError(f'invalid comparison: {elhs} {op} {erhs}') lhs = self.evaluate(elhs, context) rhs = self.evaluate(erhs, context) @@ -76,7 +74,7 @@ def evaluate(self, expr, context): def default_context(): def format_full_version(info): - version = '%s.%s.%s' % (info.major, info.minor, info.micro) + version = f'{info.major}.{info.minor}.{info.micro}' kind = info.releaselevel if kind != 'final': version += kind[0] + str(info.serial) @@ -122,10 +120,10 @@ def interpret(marker, execution_context=None): try: expr, rest = parse_marker(marker) except Exception as e: - raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e)) + raise SyntaxError(f'Unable to interpret marker syntax: {marker}: {e}') if rest and rest[0] != '#': - raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest)) + raise SyntaxError(f'unexpected trailing data in marker: {marker}: {rest}') context = dict(DEFAULT_CONTEXT) if 
execution_context: - context.update(execution_context) + context |= execution_context return evaluator.evaluate(expr, context) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/metadata.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/metadata.py index 6b1bcc1..599f746 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/metadata.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/metadata.py @@ -153,10 +153,13 @@ def _has_marker(keys, markers): if key not in _566_FIELDS and '1.3' in possible_versions: possible_versions.remove('1.3') logger.debug('Removed 1.3 due to %s', key) - if key not in _566_FIELDS and '2.1' in possible_versions: - if key != 'Description': # In 2.1, description allowed after headers - possible_versions.remove('2.1') - logger.debug('Removed 2.1 due to %s', key) + if ( + key not in _566_FIELDS + and '2.1' in possible_versions + and key != 'Description' + ): + possible_versions.remove('2.1') + logger.debug('Removed 2.1 due to %s', key) if key not in _426_FIELDS and '2.0' in possible_versions: possible_versions.remove('2.0') logger.debug('Removed 2.0 due to %s', key) @@ -164,7 +167,7 @@ def _has_marker(keys, markers): # possible_version contains qualified versions if len(possible_versions) == 1: return possible_versions[0] # found ! - elif len(possible_versions) == 0: + elif not possible_versions: logger.debug('Out of options - unknown metadata set: %s', fields) raise MetadataConflictError('Unknown metadata set') @@ -181,18 +184,19 @@ def _has_marker(keys, markers): # - 1.1 is to avoid # - 1.2 fixes Summary but has little adoption # - 2.0 adds more features and is very new - if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_0: - # we couldn't find any specific marker - if PKG_INFO_PREFERRED_VERSION in possible_versions: - return PKG_INFO_PREFERRED_VERSION + if ( + not is_1_1 + and not is_1_2 + and not is_2_1 + and not is_2_0 + and PKG_INFO_PREFERRED_VERSION in possible_versions + ): + return PKG_INFO_PREFERRED_VERSION if is_1_1: return '1.1' if is_1_2: return '1.2' - if is_2_1: - return '2.1' - - return '2.0' + return '2.1' if is_2_1 else '2.0' # This follows the rules about transforming keys as described in # https://www.python.org/dev/peps/pep-0566/#id17 @@ -230,7 +234,7 @@ def _get_name_and_version(name, version, for_filename=False): # spaces in the version string become '.' 
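# Illustrative sketch, not part of the original diff: the markers.py hunk above
# swaps dict.update() for the in-place union operator. Both mutate the
# left-hand dict, but the operator form needs Python 3.9+ (PEP 584), while
# update() works on any version. The names and values below are invented.
defaults = {"os_name": "posix", "python_version": "3.9"}
overrides = {"python_version": "3.11"}

merged_with_update = dict(defaults)
merged_with_update.update(overrides)      # works on every Python version

merged_with_operator = dict(defaults)
merged_with_operator |= overrides         # in-place union, Python 3.9+ only

assert merged_with_update == merged_with_operator == {
    "os_name": "posix",
    "python_version": "3.11",
}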
name = _FILESAFE.sub('-', name) version = _FILESAFE.sub('-', version.replace(' ', '.')) - return '%s-%s' % (name, version) + return f'{name}-{version}' class LegacyMetadata(object): @@ -291,9 +295,7 @@ def _convert_name(self, name): return _ATTR2FIELD.get(name, name) def _default_value(self, name): - if name in _LISTFIELDS or name in _ELEMENTSFIELD: - return [] - return 'UNKNOWN' + return [] if name in _LISTFIELDS or name in _ELEMENTSFIELD else 'UNKNOWN' def _remove_line_prefix(self, value): if self.metadata_version in ('1.0', '1.1'): @@ -359,7 +361,7 @@ def read_file(self, fileob): # PEP 566 specifies that the body be used for the description, if # available body = msg.get_payload() - self["Description"] = body if body else self["Description"] + self["Description"] = body or self["Description"] # logger.debug('Attempting to set metadata for %s', self) # self.set_metadata_version() @@ -436,11 +438,7 @@ def set(self, name, value): value = [] elif (name in _LISTFIELDS and not isinstance(value, (list, tuple))): - if isinstance(value, string_types): - value = [value] - else: - value = [] - + value = [value] if isinstance(value, string_types) else [] if logger.isEnabledFor(logging.WARNING): project_name = self['Name'] @@ -462,9 +460,8 @@ def set(self, name, value): logger.warning("'%s': '%s' is not a valid version (field '%s')", project_name, value, name) - if name in _UNICODEFIELDS: - if name == 'Description': - value = self._remove_line_prefix(value) + if name in _UNICODEFIELDS and name == 'Description': + value = self._remove_line_prefix(value) self._fields[name] = value @@ -476,8 +473,7 @@ def get(self, name, default=_MISSING): default = self._default_value(name) return default if name in _UNICODEFIELDS: - value = self._fields[name] - return value + return self._fields[name] elif name in _LISTFIELDS: value = self._fields[name] if value is None: @@ -510,7 +506,7 @@ def check(self, strict=False): missing.append(attr) if strict and missing != []: - msg = 'missing required metadata: %s' % ', '.join(missing) + msg = f"missing required metadata: {', '.join(missing)}" raise MetadataMissingError(msg) for attr in ('Home-page', 'Author'): @@ -537,7 +533,7 @@ def are_valid_constraints(value): for field in fields: value = self.get(field, None) if value is not None and not controller(value): - warnings.append("Wrong value for '%s': %s" % (field, value)) + warnings.append(f"Wrong value for '{field}': {value}") return missing, warnings @@ -579,8 +575,7 @@ def keys(self): return list(_version2fieldlist(self['Metadata-Version'])) def __iter__(self): - for key in self.keys(): - yield key + yield from self.keys() def values(self): return [self[key] for key in self.keys()] @@ -589,8 +584,7 @@ def items(self): return [(key, self[key]) for key in self.keys()] def __repr__(self): - return '<%s %s %s>' % (self.__class__.__name__, self.name, - self.version) + return f'<{self.__class__.__name__} {self.name} {self.version}>' METADATA_FILENAME = 'pydist.json' @@ -727,20 +721,16 @@ def __getattribute__(self, key): # special cases for PEP 459 sentinel = object() result = sentinel - d = self._data.get('extensions') - if d: + if d := self._data.get('extensions'): if key == 'commands': result = d.get('python.commands', value) elif key == 'classifiers': - d = d.get('python.details') - if d: - result = d.get(key, value) - else: - d = d.get('python.exports') - if not d: - d = self._data.get('python.exports') - if d: + if d := d.get('python.details'): result = d.get(key, value) + elif d := d.get('python.exports') or 
self._data.get( + 'python.exports' + ): + result = d.get(key, value) if result is sentinel: result = value elif key not in common: @@ -788,13 +778,9 @@ def __setattr__(self, key, value): elif key not in common: object.__setattr__(self, key, value) else: - if key == 'keywords': - if isinstance(value, string_types): - value = value.strip() - if value: - value = value.split() - else: - value = [] + if key == 'keywords' and isinstance(value, string_types): + value = value.strip() + value = value.split() if value else [] if self._legacy: self._legacy[key] = value else: @@ -810,7 +796,7 @@ def provides(self): result = self._legacy['Provides-Dist'] else: result = self._data.setdefault('provides', []) - s = '%s (%s)' % (self.name, self.version) + s = f'{self.name} ({self.version})' if s not in result: result.append(s) return result @@ -841,34 +827,26 @@ def get_requirements(self, reqts, extras=None, env=None): # unconditional include = True else: - if 'extra' not in d: - # Not extra-dependent - only environment-dependent - include = True - else: - include = d.get('extra') in extras + include = True if 'extra' not in d else d.get('extra') in extras if include: - # Not excluded because of extras, check environment - marker = d.get('environment') - if marker: + if marker := d.get('environment'): include = interpret(marker, env) if include: result.extend(d['requires']) for key in ('build', 'dev', 'test'): - e = ':%s:' % key + e = f':{key}:' if e in extras: extras.remove(e) # A recursive call, but it should terminate since 'test' # has been removed from the extras - reqts = self._data.get('%s_requires' % key, []) + reqts = self._data.get(f'{key}_requires', []) result.extend(self.get_requirements(reqts, extras=extras, env=env)) return result @property def dictionary(self): - if self._legacy: - return self._from_legacy() - return self._data + return self._from_legacy() if self._legacy else self._data @property def dependencies(self): @@ -887,13 +865,12 @@ def dependencies(self, value): def _validate_mapping(self, mapping, scheme): if mapping.get('metadata_version') != self.METADATA_VERSION: raise MetadataUnrecognizedVersionError() - missing = [] - for key, exclusions in self.MANDATORY_KEYS.items(): - if key not in mapping: - if scheme not in exclusions: - missing.append(key) - if missing: - msg = 'Missing metadata items: %s' % ', '.join(missing) + if missing := [ + key + for key, exclusions in self.MANDATORY_KEYS.items() + if key not in mapping and scheme not in exclusions + ]: + msg = f"Missing metadata items: {', '.join(missing)}" raise MetadataMissingError(msg) for k, v in mapping.items(): self._validate_value(k, v, scheme) @@ -911,8 +888,7 @@ def todict(self): if self._legacy: return self._legacy.todict(True) else: - result = extract_by_key(self._data, self.INDEX_KEYS) - return result + return extract_by_key(self._data, self.INDEX_KEYS) def _from_legacy(self): assert self._legacy and not self._data @@ -924,10 +900,7 @@ def _from_legacy(self): for k in ('name', 'version', 'license', 'summary', 'description', 'classifier'): if k in lmd: - if k == 'classifier': - nk = 'classifiers' - else: - nk = k + nk = 'classifiers' if k == 'classifier' else k result[nk] = lmd[k] kw = lmd.get('Keywords', []) if kw == ['']: @@ -969,12 +942,9 @@ def process_entries(entries): else: marker = '' if extra: - marker = 'extra == "%s"' % extra + marker = f'extra == "{extra}"' if env: - if marker: - marker = '(%s) and %s' % (env, marker) - else: - marker = env + marker = f'({env}) and {marker}' if marker else env 
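# Illustrative sketch, not part of the original diff: many hunks here replace
# the "fetch, then test" two-step with an assignment expression (the walrus
# operator, Python 3.8+), e.g. `if d := self._data.get('extensions'):` and
# `if marker := d.get('environment'):`. The dictionary below is invented.
metadata = {"environment": 'python_version >= "3.8"'}

# before: bind first, test second
marker = metadata.get("environment")
if marker:
    print("has marker:", marker)

# after: the binding happens inside the condition itself
if marker := metadata.get("environment"):
    print("has marker:", marker)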
reqts.add(';'.join((r, marker))) return reqts @@ -1011,19 +981,13 @@ def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): raise ValueError('Exactly one of path and fileobj is needed') self.validate() if legacy: - if self._legacy: - legacy_md = self._legacy - else: - legacy_md = self._to_legacy() + legacy_md = self._legacy or self._to_legacy() if path: legacy_md.write(path, skip_unknown=skip_unknown) else: legacy_md.write_file(fileobj, skip_unknown=skip_unknown) else: - if self._legacy: - d = self._from_legacy() - else: - d = self._data + d = self._from_legacy() if self._legacy else self._data if fileobj: json.dump(d, fileobj, ensure_ascii=True, indent=2, sort_keys=True) @@ -1037,11 +1001,14 @@ def add_requirements(self, requirements): self._legacy.add_requirements(requirements) else: run_requires = self._data.setdefault('run_requires', []) - always = None - for entry in run_requires: - if 'environment' not in entry and 'extra' not in entry: - always = entry - break + always = next( + ( + entry + for entry in run_requires + if 'environment' not in entry and 'extra' not in entry + ), + None, + ) if always is None: always = { 'requires': requirements } run_requires.insert(0, always) @@ -1052,5 +1019,4 @@ def add_requirements(self, requirements): def __repr__(self): name = self.name or '(no name)' version = self.version or 'no version' - return '<%s %s %s (%s)>' % (self.__class__.__name__, - self.metadata_version, name, version) + return f'<{self.__class__.__name__} {self.metadata_version} {name} ({version})>' diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/resources.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/resources.py index cd618a6..d058e7a 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/resources.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/resources.py @@ -29,7 +29,7 @@ class ResourceCache(Cache): def __init__(self, base=None): if base is None: # Use native string to avoid issues on 2.x: see Python #20140. 
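# Illustrative sketch, not part of the original diff: the add_requirements
# hunk above turns a "loop until found, else None" search into next() over a
# generator expression with an explicit default. The sample entries are
# invented.
run_requires = [
    {"environment": 'sys_platform == "win32"', "requires": ["colorama"]},
    {"extra": "test", "requires": ["pytest"]},
    {"requires": ["requests"]},               # the unconditional entry
]

always = next(
    (entry for entry in run_requires
     if "environment" not in entry and "extra" not in entry),
    None,
)
assert always == {"requires": ["requests"]}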
- base = os.path.join(get_cache_base(), str('resource-cache')) + base = os.path.join(get_cache_base(), 'resource-cache') super(ResourceCache, self).__init__(base) def is_stale(self, resource, path): @@ -58,10 +58,7 @@ def get(self, resource): dirname = os.path.dirname(result) if not os.path.isdir(dirname): os.makedirs(dirname) - if not os.path.exists(result): - stale = True - else: - stale = self.is_stale(resource, path) + stale = self.is_stale(resource, path) if os.path.exists(result) else True if stale: # write the bytes of the resource to the cache location with open(result, 'wb') as f: @@ -137,10 +134,7 @@ def _adjust_path(self, path): def _make_path(self, resource_name): # Issue #50: need to preserve type of path on Python 2.x # like os.path._get_sep - if isinstance(resource_name, bytes): # should only happen on 2.x - sep = b'/' - else: - sep = '/' + sep = b'/' if isinstance(resource_name, bytes) else '/' parts = resource_name.split(sep) parts.insert(0, self.base) result = os.path.join(*parts) @@ -178,7 +172,8 @@ def get_resources(self, resource): def allowed(f): return (f != '__pycache__' and not f.endswith(self.skipped_extensions)) - return set([f for f in os.listdir(resource.path) if allowed(f)]) + + return {f for f in os.listdir(resource.path) if allowed(f)} def is_container(self, resource): return self._is_directory(resource.path) @@ -195,10 +190,7 @@ def iterator(self, resource_name): if resource.is_container: rname = resource.name for name in resource.resources: - if not rname: - new_name = name - else: - new_name = '/'.join([rname, name]) + new_name = '/'.join([rname, name]) if rname else name child = self.find(new_name) if child.is_container: todo.append(child) @@ -264,9 +256,7 @@ def get_resources(self, resource): plen = len(path) result = set() i = bisect.bisect(self.index, path) - while i < len(self.index): - if not self.index[i].startswith(path): - break + while i < len(self.index) and self.index[i].startswith(path): s = self.index[i][plen:] result.add(s.split(os.sep, 1)[0]) # only immediate children i += 1 @@ -332,7 +322,7 @@ def finder(package): return result -_dummy_module = types.ModuleType(str('__dummy__')) +_dummy_module = types.ModuleType('__dummy__') def finder_for_path(path): @@ -346,8 +336,7 @@ def finder_for_path(path): # calls any path hooks, gets importer into cache pkgutil.get_importer(path) loader = sys.path_importer_cache.get(path) - finder = _finder_registry.get(type(loader)) - if finder: + if finder := _finder_registry.get(type(loader)): module = _dummy_module module.__file__ = os.path.join(path, '') module.__loader__ = loader diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/scripts.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/scripts.py index 185c120..785b410 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/scripts.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/scripts.py @@ -57,10 +57,9 @@ def enquote_executable(executable): if executable.startswith('/usr/bin/env '): env, _executable = executable.split(' ', 1) if ' ' in _executable and not _executable.startswith('"'): - executable = '%s "%s"' % (env, _executable) - else: - if not executable.startswith('"'): - executable = '"%s"' % executable + executable = f'{env} "{_executable}"' + elif not executable.startswith('"'): + executable = f'"{executable}"' return executable # Keep the old name around (for now), as there is at least one project 
using it! @@ -85,7 +84,7 @@ def __init__(self, source_dir, target_dir, add_launchers=True, # It only makes sense to set mode bits on POSIX. self.set_mode = (os.name == 'posix') or (os.name == 'java' and os._name == 'posix') - self.variants = set(('', 'X.Y')) + self.variants = {'', 'X.Y'} self._fileop = fileop or FileOperator(dry_run) self._is_nt = os.name == 'nt' or ( @@ -122,7 +121,7 @@ def _fix_jython_executable(self, executable): elif executable.lower().endswith('jython.exe'): # Use wrapper exe for Jython on Windows return executable - return '/usr/bin/env %s' % executable + return f'/usr/bin/env {executable}' def _build_shebang(self, executable, post_interp): """ @@ -140,10 +139,7 @@ def _build_shebang(self, executable, post_interp): else: # Add 3 for '#!' prefix and newline suffix. shebang_length = len(executable) + len(post_interp) + 3 - if sys.platform == 'darwin': - max_shebang_length = 512 - else: - max_shebang_length = 127 + max_shebang_length = 512 if sys.platform == 'darwin' else 127 simple_shebang = ((b' ' not in executable) and (shebang_length <= max_shebang_length)) @@ -163,13 +159,15 @@ def _get_shebang(self, encoding, post_interp=b'', options=None): elif not sysconfig.is_python_build(): executable = get_executable() elif in_venv(): # pragma: no cover - executable = os.path.join(sysconfig.get_path('scripts'), - 'python%s' % sysconfig.get_config_var('EXE')) + executable = os.path.join( + sysconfig.get_path('scripts'), + f"python{sysconfig.get_config_var('EXE')}", + ) else: # pragma: no cover executable = os.path.join( sysconfig.get_config_var('BINDIR'), - 'python%s%s' % (sysconfig.get_config_var('VERSION'), - sysconfig.get_config_var('EXE'))) + f"python{sysconfig.get_config_var('VERSION')}{sysconfig.get_config_var('EXE')}", + ) if options: executable = self._get_alternate_executable(executable, options) @@ -238,10 +236,7 @@ def _write_script(self, names, shebang, script_bytes, filenames, ext): if not use_launcher: script_bytes = shebang + script_bytes else: # pragma: no cover - if ext == 'py': - launcher = self._get_launcher('t') - else: - launcher = self._get_launcher('w') + launcher = self._get_launcher('t') if ext == 'py' else self._get_launcher('w') stream = BytesIO() with ZipFile(stream, 'w') as zf: zf.writestr('__main__.py', script_bytes) @@ -253,14 +248,14 @@ def _write_script(self, names, shebang, script_bytes, filenames, ext): n, e = os.path.splitext(outname) if e.startswith('.py'): outname = n - outname = '%s.exe' % outname + outname = f'{outname}.exe' try: self._fileop.write_binary_file(outname, script_bytes) except Exception: # Failed writing an executable - it might be in use. logger.warning('Failed to write executable - trying to ' 'use .deleteme logic') - dfname = '%s.deleteme' % outname + dfname = f'{outname}.deleteme' if os.path.exists(dfname): os.remove(dfname) # Not allowed to fail here os.rename(outname, dfname) # nor here @@ -272,8 +267,8 @@ def _write_script(self, names, shebang, script_bytes, filenames, ext): except Exception: pass # still in use - ignore error else: - if self._is_nt and not outname.endswith('.' 
+ ext): # pragma: no cover - outname = '%s.%s' % (outname, ext) + if self._is_nt and not outname.endswith(f'.{ext}'): # pragma: no cover + outname = f'{outname}.{ext}' if os.path.exists(outname) and not self.clobber: logger.warning('Skipping existing file %s', outname) continue @@ -285,9 +280,8 @@ def _write_script(self, names, shebang, script_bytes, filenames, ext): def _make_script(self, entry, filenames, options=None): post_interp = b'' if options: - args = options.get('interpreter_args', []) - if args: - args = ' %s' % ' '.join(args) + if args := options.get('interpreter_args', []): + args = f" {' '.join(args)}" post_interp = args.encode('utf-8') shebang = self._get_shebang('utf-8', post_interp, options=options) script = self._get_script_text(entry).encode('utf-8') @@ -296,14 +290,10 @@ def _make_script(self, entry, filenames, options=None): if '' in self.variants: scriptnames.add(name) if 'X' in self.variants: - scriptnames.add('%s%s' % (name, self.version_info[0])) + scriptnames.add(f'{name}{self.version_info[0]}') if 'X.Y' in self.variants: - scriptnames.add('%s-%s.%s' % (name, self.version_info[0], - self.version_info[1])) - if options and options.get('gui', False): - ext = 'pyw' - else: - ext = 'py' + scriptnames.add(f'{name}-{self.version_info[0]}.{self.version_info[1]}') + ext = 'pyw' if options and options.get('gui', False) else 'py' self._write_script(scriptnames, shebang, script, filenames, ext) def _copy_script(self, script, filenames): @@ -330,8 +320,7 @@ def _copy_script(self, script, filenames): self.get_command_name(), script) return - match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n')) - if match: + if match := FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n')): adjust = True post_interp = match.group(1) or b'' @@ -349,10 +338,7 @@ def _copy_script(self, script, filenames): encoding, lines = detect_encoding(f.readline) f.seek(0) shebang = self._get_shebang(encoding, post_interp) - if b'pythonw' in first_line: # pragma: no cover - ext = 'pyw' - else: - ext = 'py' + ext = 'pyw' if b'pythonw' in first_line else 'py' n = os.path.basename(outname) self._write_script([n], shebang, f.read(), filenames, ext) if f: @@ -371,18 +357,14 @@ def dry_run(self, value): # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/ def _get_launcher(self, kind): - if struct.calcsize('P') == 8: # 64-bit - bits = '64' - else: - bits = '32' - name = '%s%s.exe' % (kind, bits) + bits = '64' if struct.calcsize('P') == 8 else '32' + name = f'{kind}{bits}.exe' # Issue 31: don't hardcode an absolute package name, but # determine it relative to the current package distlib_package = __name__.rsplit('.', 1)[0] resource = finder(distlib_package).find(name) if not resource: - msg = ('Unable to find resource %s in package %s' % (name, - distlib_package)) + msg = f'Unable to find resource {name} in package {distlib_package}' raise ValueError(msg) return resource.bytes diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/util.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/util.py index 373f77a..6ee7c81 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/util.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distlib/util.py @@ -73,7 +73,7 @@ def marker_var(remaining): else: q = remaining[0] if q not in '\'"': - raise SyntaxError('invalid expression: %s' % remaining) + raise SyntaxError(f'invalid expression: {remaining}') oq = 
'\'"'.replace(q, '') remaining = remaining[1:] parts = [q] @@ -87,12 +87,12 @@ def marker_var(remaining): else: m = STRING_CHUNK.match(remaining) if not m: - raise SyntaxError('error in string literal: %s' % remaining) + raise SyntaxError(f'error in string literal: {remaining}') parts.append(m.groups()[0]) remaining = remaining[m.end():] else: s = ''.join(parts) - raise SyntaxError('unterminated string: %s' % s) + raise SyntaxError(f'unterminated string: {s}') parts.append(q) result = ''.join(parts) remaining = remaining[1:].lstrip() # skip past closing quote @@ -102,7 +102,7 @@ def marker_expr(remaining): if remaining and remaining[0] == '(': result, remaining = marker(remaining[1:].lstrip()) if remaining[0] != ')': - raise SyntaxError('unterminated parenthesis: %s' % remaining) + raise SyntaxError(f'unterminated parenthesis: {remaining}') remaining = remaining[1:].lstrip() else: lhs, remaining = marker_var(remaining) @@ -152,27 +152,27 @@ def parse_requirement(req): return None m = IDENTIFIER.match(remaining) if not m: - raise SyntaxError('name expected: %s' % remaining) + raise SyntaxError(f'name expected: {remaining}') distname = m.groups()[0] remaining = remaining[m.end():] extras = mark_expr = versions = uri = None if remaining and remaining[0] == '[': i = remaining.find(']', 1) if i < 0: - raise SyntaxError('unterminated extra: %s' % remaining) + raise SyntaxError(f'unterminated extra: {remaining}') s = remaining[1:i] remaining = remaining[i + 1:].lstrip() extras = [] while s: m = IDENTIFIER.match(s) if not m: - raise SyntaxError('malformed extra: %s' % s) + raise SyntaxError(f'malformed extra: {s}') extras.append(m.groups()[0]) s = s[m.end():] if not s: break if s[0] != ',': - raise SyntaxError('comma expected in extras: %s' % s) + raise SyntaxError(f'comma expected in extras: {s}') s = s[1:].lstrip() if not extras: extras = None @@ -182,7 +182,7 @@ def parse_requirement(req): remaining = remaining[1:].lstrip() m = NON_SPACE.match(remaining) if not m: - raise SyntaxError('invalid URI: %s' % remaining) + raise SyntaxError(f'invalid URI: {remaining}') uri = m.groups()[0] t = urlparse(uri) # there are issues with Python and URL parsing, so this test @@ -190,7 +190,7 @@ def parse_requirement(req): # always parse invalid URLs correctly - it should raise # exceptions for malformed URLs if not (t.scheme and t.netloc): - raise SyntaxError('Invalid URL: %s' % uri) + raise SyntaxError(f'Invalid URL: {uri}') remaining = remaining[m.end():].lstrip() else: @@ -208,7 +208,7 @@ def get_versions(ver_remaining): ver_remaining = ver_remaining[m.end():] m = VERSION_IDENTIFIER.match(ver_remaining) if not m: - raise SyntaxError('invalid version: %s' % ver_remaining) + raise SyntaxError(f'invalid version: {ver_remaining}') v = m.groups()[0] versions.append((op, v)) ver_remaining = ver_remaining[m.end():] @@ -217,7 +217,7 @@ def get_versions(ver_remaining): ver_remaining = ver_remaining[1:].lstrip() m = COMPARE_OP.match(ver_remaining) if not m: - raise SyntaxError('invalid constraint: %s' % ver_remaining) + raise SyntaxError(f'invalid constraint: {ver_remaining}') if not versions: versions = None return versions, ver_remaining @@ -227,7 +227,7 @@ def get_versions(ver_remaining): else: i = remaining.find(')', 1) if i < 0: - raise SyntaxError('unterminated parenthesis: %s' % remaining) + raise SyntaxError(f'unterminated parenthesis: {remaining}') s = remaining[1:i] remaining = remaining[i + 1:].lstrip() # As a special diversion from PEP 508, allow a version number @@ -238,27 +238,28 @@ def 
get_versions(ver_remaining): else: m = VERSION_IDENTIFIER.match(s) if not m: - raise SyntaxError('invalid constraint: %s' % s) + raise SyntaxError(f'invalid constraint: {s}') v = m.groups()[0] s = s[m.end():].lstrip() if s: - raise SyntaxError('invalid constraint: %s' % s) + raise SyntaxError(f'invalid constraint: {s}') versions = [('~=', v)] if remaining: if remaining[0] != ';': - raise SyntaxError('invalid requirement: %s' % remaining) + raise SyntaxError(f'invalid requirement: {remaining}') remaining = remaining[1:].lstrip() mark_expr, remaining = parse_marker(remaining) if remaining and remaining[0] != '#': - raise SyntaxError('unexpected trailing data: %s' % remaining) + raise SyntaxError(f'unexpected trailing data: {remaining}') - if not versions: - rs = distname - else: - rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions])) + rs = ( + f"{distname} {', '.join(['%s %s' % con for con in versions])}" + if versions + else distname + ) return Container(name=distname, extras=extras, constraints=versions, marker=mark_expr, url=uri, requirement=rs) @@ -285,18 +286,16 @@ def get_rel_path(root, path): else: rel_path = get_rel_path(abs_base, abs_path) rel_dest = dest.replace(os.path.sep, '/').rstrip('/') - destinations[resource_file] = rel_dest + '/' + rel_path + destinations[resource_file] = f'{rel_dest}/{rel_path}' return destinations def in_venv(): - if hasattr(sys, 'real_prefix'): - # virtualenv venvs - result = True - else: - # PEP 405 venvs - result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix) - return result + return ( + True + if hasattr(sys, 'real_prefix') + else sys.prefix != getattr(sys, 'base_prefix', sys.prefix) + ) def get_executable(): @@ -334,11 +333,7 @@ def proceed(prompt, allowed_chars, error_prompt=None, default=None): def extract_by_key(d, keys): if isinstance(keys, string_types): keys = keys.split() - result = {} - for key in keys: - if key in d: - result[key] = d[key] - return result + return {key: d[key] for key in keys if key in d} def read_exports(stream): if sys.version_info[0] >= 3: @@ -352,7 +347,7 @@ def read_exports(stream): result = jdata['extensions']['python.exports']['exports'] for group, entries in result.items(): for k, v in entries.items(): - s = '%s = %s' % (k, v) + s = f'{k} = {v}' entry = get_export_entry(s) assert entry is not None entries[k] = entry @@ -379,7 +374,7 @@ def read_stream(cp, stream): for key in cp.sections(): result[key] = entries = {} for name, value in cp.items(key): - s = '%s = %s' % (name, value) + s = f'{name} = {value}' entry = get_export_entry(s) assert entry is not None #entry.dist = self @@ -396,12 +391,9 @@ def write_exports(exports, stream): # TODO check k, v for valid values cp.add_section(k) for entry in v.values(): - if entry.suffix is None: - s = entry.prefix - else: - s = '%s:%s' % (entry.prefix, entry.suffix) + s = entry.prefix if entry.suffix is None else f'{entry.prefix}:{entry.suffix}' if entry.flags: - s = '%s [%s]' % (s, ', '.join(entry.flags)) + s = f"{s} [{', '.join(entry.flags)}]" cp.set(k, entry.name, s) cp.write(stream) @@ -463,16 +455,14 @@ def convert_path(pathname): if not pathname: return pathname if pathname[0] == '/': - raise ValueError("path '%s' cannot be absolute" % pathname) + raise ValueError(f"path '{pathname}' cannot be absolute") if pathname[-1] == '/': - raise ValueError("path '%s' cannot end with '/'" % pathname) + raise ValueError(f"path '{pathname}' cannot end with '/'") paths = pathname.split('/') while os.curdir in paths: paths.remove(os.curdir) - if not paths: 
- return os.curdir - return os.path.join(*paths) + return os.path.join(*paths) if paths else os.curdir class FileOperator(object): @@ -505,10 +495,11 @@ def newer(self, source, target): if not os.path.exists(source): raise DistlibException("file '%r' does not exist" % os.path.abspath(source)) - if not os.path.exists(target): - return True - - return os.stat(source).st_mtime > os.stat(target).st_mtime + return ( + os.stat(source).st_mtime > os.stat(target).st_mtime + if os.path.exists(target) + else True + ) def copy_file(self, infile, outfile, check=True): """Copy a file respecting dry-run and force flags. @@ -519,11 +510,11 @@ def copy_file(self, infile, outfile, check=True): msg = None if check: if os.path.islink(outfile): - msg = '%s is a symlink' % outfile + msg = f'{outfile} is a symlink' elif os.path.exists(outfile) and not os.path.isfile(outfile): - msg = '%s is a non-regular file' % outfile + msg = f'{outfile} is a non-regular file' if msg: - raise ValueError(msg + ' which would be overwritten') + raise ValueError(f'{msg} which would be overwritten') shutil.copyfile(infile, outfile) self.record_as_written(outfile) @@ -574,7 +565,7 @@ def ensure_dir(self, path): self.ensured.add(path) d, f = os.path.split(path) self.ensure_dir(d) - logger.info('Creating %s' % path) + logger.info(f'Creating {path}') if not self.dry_run: os.mkdir(path) if self.record: @@ -598,25 +589,21 @@ def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_in return dpath def ensure_removed(self, path): - if os.path.exists(path): - if os.path.isdir(path) and not os.path.islink(path): - logger.debug('Removing directory tree at %s', path) - if not self.dry_run: - shutil.rmtree(path) - if self.record: - if path in self.dirs_created: - self.dirs_created.remove(path) - else: - if os.path.islink(path): - s = 'link' - else: - s = 'file' - logger.debug('Removing %s %s', s, path) - if not self.dry_run: - os.remove(path) - if self.record: - if path in self.files_written: - self.files_written.remove(path) + if not os.path.exists(path): + return + if os.path.isdir(path) and not os.path.islink(path): + logger.debug('Removing directory tree at %s', path) + if not self.dry_run: + shutil.rmtree(path) + if self.record and path in self.dirs_created: + self.dirs_created.remove(path) + else: + s = 'link' if os.path.islink(path) else 'file' + logger.debug('Removing %s %s', s, path) + if not self.dry_run: + os.remove(path) + if self.record and path in self.files_written: + self.files_written.remove(path) def is_writable(self, path): result = False @@ -650,8 +637,7 @@ def rollback(self): # reverse so that subdirs appear before their parents dirs = sorted(self.dirs_created, reverse=True) for d in dirs: - flist = os.listdir(d) - if flist: + if flist := os.listdir(d): assert flist == ['__pycache__'] sd = os.path.join(d, flist[0]) os.rmdir(sd) @@ -685,18 +671,19 @@ def value(self): return resolve(self.prefix, self.suffix) def __repr__(self): # pragma: no cover - return '' % (self.name, self.prefix, - self.suffix, self.flags) + return f'' def __eq__(self, other): - if not isinstance(other, ExportEntry): - result = False - else: - result = (self.name == other.name and - self.prefix == other.prefix and - self.suffix == other.suffix and - self.flags == other.flags) - return result + return ( + ( + self.name == other.name + and self.prefix == other.prefix + and self.suffix == other.suffix + and self.flags == other.flags + ) + if isinstance(other, ExportEntry) + else False + ) __hash__ = object.__hash__ @@ -707,33 +694,29 
@@ def __eq__(self, other): ''', re.VERBOSE) def get_export_entry(specification): - m = ENTRY_RE.search(specification) - if not m: - result = None - if '[' in specification or ']' in specification: - raise DistlibException("Invalid specification " - "'%s'" % specification) - else: + if m := ENTRY_RE.search(specification): d = m.groupdict() name = d['name'] path = d['callable'] colons = path.count(':') if colons == 0: prefix, suffix = path, None - else: - if colons != 1: - raise DistlibException("Invalid specification " - "'%s'" % specification) + elif colons == 1: prefix, suffix = path.split(':') + else: + raise DistlibException(f"Invalid specification '{specification}'") flags = d['flags'] if flags is None: if '[' in specification or ']' in specification: - raise DistlibException("Invalid specification " - "'%s'" % specification) + raise DistlibException(f"Invalid specification '{specification}'") flags = [] else: flags = [f.strip() for f in flags.split(',')] result = ExportEntry(name, prefix, suffix, flags) + else: + result = None + if '[' in specification or ']' in specification: + raise DistlibException(f"Invalid specification '{specification}'") return result @@ -796,9 +779,7 @@ def path_to_cache_dir(path): def ensure_slash(s): - if not s.endswith('/'): - return s + '/' - return s + return s if s.endswith('/') else f'{s}/' def parse_credentials(netloc): @@ -845,18 +826,15 @@ def split_filename(filename, project_name=None): result = None pyver = None filename = unquote(filename).replace(' ', '-') - m = PYTHON_VERSION.search(filename) - if m: + if m := PYTHON_VERSION.search(filename): pyver = m.group(1) filename = filename[:m.start()] if project_name and len(filename) > len(project_name) + 1: - m = re.match(re.escape(project_name) + r'\b', filename) - if m: + if m := re.match(re.escape(project_name) + r'\b', filename): n = m.end() result = filename[:n], filename[n + 1:], pyver if result is None: - m = PROJECT_NAME_AND_VERSION.match(filename) - if m: + if m := PROJECT_NAME_AND_VERSION.match(filename): result = m.group(1), m.group(3), pyver return result @@ -892,12 +870,12 @@ def get_extras(requested, available): elif r.startswith('-'): unwanted = r[1:] if unwanted not in available: - logger.warning('undeclared extra: %s' % unwanted) + logger.warning(f'undeclared extra: {unwanted}') if unwanted in result: result.remove(unwanted) else: if r not in available: - logger.warning('undeclared extra: %s' % r) + logger.warning(f'undeclared extra: {r}') result.add(r) return result # @@ -927,13 +905,12 @@ def _get_external_data(url): _external_data_base_url = 'https://www.red-dove.com/pypi/projects/' def get_project_data(name): - url = '%s/%s/project.json' % (name[0].upper(), name) + url = f'{name[0].upper()}/{name}/project.json' url = urljoin(_external_data_base_url, url) - result = _get_external_data(url) - return result + return _get_external_data(url) def get_package_data(name, version): - url = '%s/%s/package-%s.json' % (name[0].upper(), name, version) + url = f'{name[0].upper()}/{name}/package-{version}.json' url = urljoin(_external_data_base_url, url) return _get_external_data(url) @@ -1105,9 +1082,8 @@ def get_steps(self, final): if not self.is_step(final): raise ValueError('Unknown: %r' % final) result = [] - todo = [] seen = set() - todo.append(final) + todo = [final] while todo: step = todo.pop(0) if step in seen: @@ -1182,10 +1158,8 @@ def dot(self): result = ['digraph G {'] for succ in self._preds: preds = self._preds[succ] - for pred in preds: - result.append(' %s -> %s;' % (pred, 
succ)) - for node in self._nodes: - result.append(' %s;' % node) + result.extend(f' {pred} -> {succ};' for pred in preds) + result.extend(f' {node};' for node in self._nodes) result.append('}') return '\n'.join(result) @@ -1311,22 +1285,19 @@ def maximum(self): @property def percentage(self): if self.done: - result = '100 %' + return '100 %' elif self.max is None: - result = ' ?? %' + return ' ?? %' else: v = 100.0 * (self.cur - self.min) / (self.max - self.min) - result = '%3d %%' % v - return result + return '%3d %%' % v def format_duration(self, duration): - if (duration <= 0) and self.max is None or self.cur == self.min: - result = '??:??:??' - #elif duration < 1: - # result = '--:--:--' - else: - result = time.strftime('%H:%M:%S', time.gmtime(duration)) - return result + return ( + '??:??:??' + if (duration <= 0) and self.max is None or self.cur == self.min + else time.strftime('%H:%M:%S', time.gmtime(duration)) + ) @property def ETA(self): @@ -1345,14 +1316,11 @@ def ETA(self): t = float(self.max - self.min) t /= self.cur - self.min t = (t - 1) * self.elapsed - return '%s: %s' % (prefix, self.format_duration(t)) + return f'{prefix}: {self.format_duration(t)}' @property def speed(self): - if self.elapsed == 0: - result = 0.0 - else: - result = (self.cur - self.min) / self.elapsed + result = 0.0 if self.elapsed == 0 else (self.cur - self.min) / self.elapsed for unit in UNITS: if result < 1000: break @@ -1385,26 +1353,22 @@ def _iglob(path_glob): assert len(rich_path_glob) == 3, rich_path_glob prefix, set, suffix = rich_path_glob for item in set.split(','): - for path in _iglob(''.join((prefix, item, suffix))): - yield path + yield from _iglob(''.join((prefix, item, suffix))) + elif '**' not in path_glob: + yield from std_iglob(path_glob) else: - if '**' not in path_glob: - for item in std_iglob(path_glob): - yield item + prefix, radical = path_glob.split('**', 1) + if prefix == '': + prefix = '.' + if radical == '': + radical = '*' else: - prefix, radical = path_glob.split('**', 1) - if prefix == '': - prefix = '.' 
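# Illustrative sketch, not part of the original diff: the _iglob hunk in
# util.py (continuing below) collapses "for item in gen: yield item" loops
# into "yield from gen" (PEP 380, Python 3.3+). chunks() is a stand-in
# generator written for this example.
def chunks():
    yield from range(3)       # equivalent to: for i in range(3): yield i
    yield from "ab"

assert list(chunks()) == [0, 1, 2, "a", "b"]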
- if radical == '': - radical = '*' - else: - # we support both - radical = radical.lstrip('/') - radical = radical.lstrip('\\') - for path, dir, files in os.walk(prefix): - path = os.path.normpath(path) - for fn in _iglob(os.path.join(path, radical)): - yield fn + # we support both + radical = radical.lstrip('/') + radical = radical.lstrip('\\') + for path, dir, files in os.walk(prefix): + path = os.path.normpath(path) + yield from _iglob(os.path.join(path, radical)) if ssl: from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, @@ -1428,10 +1392,7 @@ def connect(self): if not hasattr(ssl, 'SSLContext'): # For 2.x - if self.ca_certs: - cert_reqs = ssl.CERT_REQUIRED - else: - cert_reqs = ssl.CERT_NONE + cert_reqs = ssl.CERT_REQUIRED if self.ca_certs else ssl.CERT_NONE self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, cert_reqs=cert_reqs, ssl_version=ssl.PROTOCOL_SSLv23, @@ -1534,13 +1495,11 @@ def __init__(self, timeout, use_datetime=0): def make_connection(self, host): h, eh, x509 = self.get_host_info(host) if _ver_info == (2, 6): - result = HTTP(h, timeout=self.timeout) - else: - if not self._connection or host != self._connection[0]: - self._extra_headers = eh - self._connection = host, httplib.HTTPConnection(h) - result = self._connection[1] - return result + return HTTP(h, timeout=self.timeout) + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPConnection(h) + return self._connection[1] if ssl: class SafeTransport(xmlrpclib.SafeTransport): @@ -1554,14 +1513,12 @@ def make_connection(self, host): kwargs = {} kwargs['timeout'] = self.timeout if _ver_info == (2, 6): - result = HTTPS(host, None, **kwargs) - else: - if not self._connection or host != self._connection[0]: - self._extra_headers = eh - self._connection = host, httplib.HTTPSConnection(h, None, - **kwargs) - result = self._connection[1] - return result + return HTTPS(host, None, **kwargs) + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPSConnection(h, None, + **kwargs) + return self._connection[1] class ServerProxy(xmlrpclib.ServerProxy): @@ -1572,10 +1529,7 @@ def __init__(self, uri, **kwargs): if timeout is not None: scheme, _ = splittype(uri) use_datetime = kwargs.get('use_datetime', 0) - if scheme == 'https': - tcls = SafeTransport - else: - tcls = Transport + tcls = SafeTransport if scheme == 'https' else Transport kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime) self.transport = t xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) @@ -1685,7 +1639,7 @@ def convert(o): # Check for valid identifiers args = config.pop('[]', ()) if args: - args = tuple([convert(o) for o in args]) + args = tuple(convert(o) for o in args) items = [(k, convert(config[k])) for k in config if valid_ident(k)] kwargs = dict(items) result = c(*args, **kwargs) diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distro.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distro.py index d86113b..edec6dc 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distro.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/distro.py @@ -552,7 +552,7 @@ def __init__(self, f): self._f = f def __get__(self, obj, owner): - assert obj is not None, 'call {} on an instance'.format(self._fname) + assert obj is not None, f'call {self._fname} on an instance' ret = 
obj.__dict__[self._fname] = self._f(obj) return ret @@ -704,10 +704,7 @@ def normalize(distro_id, table): return normalize(distro_id, NORMALIZED_DISTRO_ID) distro_id = self.uname_attr('id') - if distro_id: - return normalize(distro_id, NORMALIZED_DISTRO_ID) - - return '' + return normalize(distro_id, NORMALIZED_DISTRO_ID) if distro_id else '' def name(self, pretty=False): """ @@ -716,18 +713,17 @@ def name(self, pretty=False): For details, see :func:`distro.name`. """ name = self.os_release_attr('name') \ - or self.lsb_release_attr('distributor_id') \ - or self.distro_release_attr('name') \ - or self.uname_attr('name') + or self.lsb_release_attr('distributor_id') \ + or self.distro_release_attr('name') \ + or self.uname_attr('name') if pretty: name = self.os_release_attr('pretty_name') \ - or self.lsb_release_attr('description') + or self.lsb_release_attr('description') if not name: name = self.distro_release_attr('name') \ - or self.uname_attr('name') - version = self.version(pretty=True) - if version: - name = name + ' ' + version + or self.uname_attr('name') + if version := self.version(pretty=True): + name = f'{name} {version}' return name or '' def version(self, pretty=False, best=False): @@ -771,11 +767,9 @@ def version_parts(self, best=False): For details, see :func:`distro.version_parts`. """ - version_str = self.version(best=best) - if version_str: + if version_str := self.version(best=best): version_regex = re.compile(r'(\d+)\.?(\d+)?\.?(\d+)?') - matches = version_regex.match(version_str) - if matches: + if matches := version_regex.match(version_str): major, minor, build_number = matches.groups() return major, minor or '', build_number or '' return '', '', '' @@ -972,10 +966,6 @@ def _parse_os_release_content(lines): if '=' in token: k, v = token.split('=', 1) props[k.lower()] = v - else: - # Ignore any tokens that are not variable assignments - pass - if 'version_codename' in props: # os-release added a version_codename field. Use that in # preference to anything else Note that some distros purposefully @@ -986,9 +976,7 @@ def _parse_os_release_content(lines): # Same as above but a non-standard field name used on older Ubuntus props['codename'] = props['ubuntu_codename'] elif 'version' in props: - # If there is no version_codename, parse it from the version - codename = re.search(r'(\(\D+\))|,(\s+)?\D+', props['version']) - if codename: + if codename := re.search(r'(\(\D+\))|,(\s+)?\D+', props['version']): codename = codename.group() codename = codename.strip('()') codename = codename.strip(',') @@ -1038,7 +1026,7 @@ def _parse_lsb_release_content(lines): # Ignore lines without colon. continue k, v = kv - props.update({k.replace(' ', '_').lower(): v.strip()}) + props[k.replace(' ', '_').lower()] = v.strip() return props @cached_property @@ -1055,8 +1043,7 @@ def _uname_info(self): @staticmethod def _parse_uname_content(lines): props = {} - match = re.search(r'^([^\s]+)\s+([\d\.]+)', lines[0].strip()) - if match: + if match := re.search(r'^([^\s]+)\s+([\d\.]+)', lines[0].strip()): name, version = match.groups() # This is to prevent the Linux kernel version from @@ -1077,9 +1064,8 @@ def _to_str(text): if sys.version_info[0] >= 3: if isinstance(text, bytes): return text.decode(encoding) - else: - if isinstance(text, unicode): # noqa - return text.encode(encoding) + elif isinstance(text, unicode): # noqa + return text.encode(encoding) return text @@ -1103,7 +1089,7 @@ def _distro_release_info(self): # possible. 
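# Illustrative sketch, not part of the original diff: the bulk of these hunks
# convert %-interpolation and str.format() calls to f-strings (Python 3.6+),
# as in `f'{name} {version}'` above. The values here are invented.
name, version = "distlib", "0.3.0"

old_percent = "<%s %s>" % (name, version)
old_format = "<{} {}>".format(name, version)
new_fstring = f"<{name} {version}>"

assert old_percent == old_format == new_fstring == "<distlib 0.3.0>"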
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) if 'name' in distro_info \ - and 'cloudlinux' in distro_info['name'].lower(): + and 'cloudlinux' in distro_info['name'].lower(): distro_info['id'] = 'cloudlinux' elif match: distro_info['id'] = match.group(1) @@ -1138,8 +1124,7 @@ def _distro_release_info(self): for basename in basenames: if basename in _DISTRO_RELEASE_IGNORE_BASENAMES: continue - match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) - if match: + if match := _DISTRO_RELEASE_BASENAME_PATTERN.match(basename): filepath = os.path.join(_UNIXCONFDIR, basename) distro_info = self._parse_distro_release_file(filepath) if 'name' in distro_info: diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/pyparsing.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/pyparsing.py index f6b32d6..fa1e4ac 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/pyparsing.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/pyparsing.py @@ -265,21 +265,21 @@ def _ustr(obj): except AttributeError: continue -_generatorType = type((y for y in range(1))) +_generatorType = type(iter(range(1))) def _xml_escape(data): """Escape &, <, >, ", ', etc. in a string of data.""" # ampersand must be replaced first from_symbols = '&><"\'' - to_symbols = ('&' + s + ';' for s in "amp gt lt quot apos".split()) + to_symbols = (f'&{s};' for s in "amp gt lt quot apos".split()) for from_, to_ in zip(from_symbols, to_symbols): data = data.replace(from_, to_) return data alphas = string.ascii_uppercase + string.ascii_lowercase nums = "0123456789" -hexnums = nums + "ABCDEFabcdef" +hexnums = f"{nums}ABCDEFabcdef" alphanums = alphas + nums _bslash = chr(92) printables = "".join(c for c in string.printable if c not in string.whitespace) @@ -413,14 +413,13 @@ def explain(exc, depth=16): depth = sys.getrecursionlimit() ret = [] if isinstance(exc, ParseBaseException): - ret.append(exc.line) - ret.append(' ' * (exc.col - 1) + '^') + ret.extend((exc.line, ' ' * (exc.col - 1) + '^')) ret.append("{0}: {1}".format(type(exc).__name__, exc)) if depth > 0: callers = inspect.getinnerframes(exc.__traceback__, context=depth) seen = set() - for i, ff in enumerate(callers[-depth:]): + for ff in callers[-depth:]: frm = ff[0] f_self = frm.f_locals.get('self', None) @@ -487,7 +486,7 @@ def __init__(self, parseElementList): self.parseElementTrace = parseElementList def __str__(self): - return "RecursiveGrammarException: %s" % self.parseElementTrace + return f"RecursiveGrammarException: {self.parseElementTrace}" class _ParseResultsWithOffset(object): def __init__(self, p1, p2): @@ -567,7 +566,7 @@ def __init__(self, toklist=None, name=None, asList=True, modal=True, isinstance= self.__toklist = list(toklist) else: self.__toklist = [toklist] - self.__tokdict = dict() + self.__tokdict = {} if name is not None and name: if not modal: @@ -593,21 +592,21 @@ def __init__(self, toklist=None, name=None, asList=True, modal=True, isinstance= def __getitem__(self, i): if isinstance(i, (int, slice)): return self.__toklist[i] + if i in self.__accumNames: + return ParseResults([v[0] for v in self.__tokdict[i]]) + else: - if i not in self.__accumNames: - return self.__tokdict[i][-1][0] - else: - return ParseResults([v[0] for v in self.__tokdict[i]]) + return self.__tokdict[i][-1][0] def __setitem__(self, k, v, isinstance=isinstance): if isinstance(v, _ParseResultsWithOffset): - self.__tokdict[k] = self.__tokdict.get(k, list()) + [v] + 
self.__tokdict[k] = self.__tokdict.get(k, []) + [v] sub = v[0] elif isinstance(k, (int, slice)): self.__toklist[k] = v sub = v else: - self.__tokdict[k] = self.__tokdict.get(k, list()) + [_ParseResultsWithOffset(v, 0)] + self.__tokdict[k] = self.__tokdict.get(k, []) + [_ParseResultsWithOffset(v, 0)] sub = v if isinstance(sub, ParseResults): sub.__parent = wkref(self) @@ -741,17 +740,13 @@ def remove_LABEL(tokens): if k == 'default': args = (args[0], v) else: - raise TypeError("pop() got an unexpected keyword argument '%s'" % k) - if (isinstance(args[0], int) - or len(args) == 1 - or args[0] in self): - index = args[0] - ret = self[index] - del self[index] - return ret - else: - defaultvalue = args[1] - return defaultvalue + raise TypeError(f"pop() got an unexpected keyword argument '{k}'") + if not isinstance(args[0], int) and len(args) != 1 and args[0] not in self: + return args[1] + index = args[0] + ret = self[index] + del self[index] + return ret def get(self, key, defaultValue=None): """ @@ -771,10 +766,7 @@ def get(self, key, defaultValue=None): print(result.get("hour", "not specified")) # -> 'not specified' print(result.get("hour")) # -> None """ - if key in self: - return self[key] - else: - return defaultValue + return self[key] if key in self else defaultValue def insert(self, index, insStr): """ @@ -866,15 +858,10 @@ def __iadd__(self, other): return self def __radd__(self, other): - if isinstance(other, int) and other == 0: - # useful for merging many ParseResults using sum() builtin - return self.copy() - else: - # this may raise a TypeError - so be it - return other + self + return self.copy() if isinstance(other, int) and other == 0 else other + self def __repr__(self): - return "(%s, %s)" % (repr(self.__toklist), repr(self.__tokdict)) + return f"({repr(self.__toklist)}, {repr(self.__tokdict)})" def __str__(self): return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']' @@ -927,21 +914,14 @@ def asDict(self): print(json.dumps(result)) # -> Exception: TypeError: ... 
is not JSON serializable print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"} """ - if PY_3: - item_fn = self.items - else: - item_fn = self.iteritems - + item_fn = self.items if PY_3 else self.iteritems def toItem(obj): if isinstance(obj, ParseResults): - if obj.haskeys(): - return obj.asDict() - else: - return [toItem(v) for v in obj] + return obj.asDict() if obj.haskeys() else [toItem(v) for v in obj] else: return obj - return dict((k, toItem(v)) for k, v in item_fn()) + return {k: toItem(v) for k, v in item_fn()} def copy(self): """ @@ -960,9 +940,8 @@ def asXML(self, doctag=None, namedItemsOnly=False, indent="", formatted=True): """ nl = "\n" out = [] - namedItems = dict((v[1], k) for (k, vlist) in self.__tokdict.items() - for v in vlist) - nextLevelIndent = indent + " " + namedItems = {v[1]: k for (k, vlist) in self.__tokdict.items() for v in vlist} + nextLevelIndent = f"{indent} " # collapse out indents if formatting is not desired if not formatted: @@ -973,9 +952,8 @@ def asXML(self, doctag=None, namedItemsOnly=False, indent="", formatted=True): selfTag = None if doctag is not None: selfTag = doctag - else: - if self.__name: - selfTag = self.__name + elif self.__name: + selfTag = self.__name if not selfTag: if namedItemsOnly: @@ -1050,11 +1028,7 @@ def getName(self): if self.__name: return self.__name elif self.__parent: - par = self.__parent() - if par: - return par.__lookup(self) - else: - return None + return par.__lookup(self) if (par := self.__parent()) else None elif (len(self) == 1 and len(self.__tokdict) == 1 and next(iter(self.__tokdict.values()))[0][1] in (0, -1)): @@ -1084,7 +1058,6 @@ def dump(self, indent='', full=True, include_list=True, _depth=0): - year: 12 """ out = [] - NL = '\n' if include_list: out.append(indent + _ustr(self.asList())) else: @@ -1093,10 +1066,11 @@ def dump(self, indent='', full=True, include_list=True, _depth=0): if full: if self.haskeys(): items = sorted((str(k), v) for k, v in self.items()) + NL = '\n' for k, v in items: if out: out.append(NL) - out.append("%s%s- %s: " % (indent, (' ' * _depth), k)) + out.append(f"{indent}{' ' * _depth}- {k}: ") if isinstance(v, ParseResults): if v: out.append(v.dump(indent=indent, full=full, include_list=include_list, _depth=_depth + 1)) @@ -1166,11 +1140,8 @@ def __setstate__(self, state): self.__toklist = state[0] self.__tokdict, par, inAccumNames, self.__name = state[1] self.__accumNames = {} - self.__accumNames.update(inAccumNames) - if par is not None: - self.__parent = wkref(par) - else: - self.__parent = None + self.__accumNames |= inAccumNames + self.__parent = wkref(par) if par is not None else None def __getnewargs__(self): return self.__toklist, self.__name, self.__asList, self.__modal @@ -1239,19 +1210,19 @@ def line(loc, strg): """ lastCR = strg.rfind("\n", 0, loc) nextCR = strg.find("\n", loc) - if nextCR >= 0: - return strg[lastCR + 1:nextCR] - else: - return strg[lastCR + 1:] + return strg[lastCR + 1:nextCR] if nextCR >= 0 else strg[lastCR + 1:] def _defaultStartDebugAction(instring, loc, expr): - print(("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % (lineno(loc, instring), col(loc, instring)))) + print( + f"Match {_ustr(expr)} at loc {_ustr(loc)}" + + "(%d,%d)" % (lineno(loc, instring), col(loc, instring)) + ) def _defaultSuccessDebugAction(instring, startloc, endloc, expr, toks): - print("Matched " + _ustr(expr) + " -> " + str(toks.asList())) + print(f"Matched {_ustr(expr)} -> {str(toks.asList())}") def _defaultExceptionDebugAction(instring, 
loc, expr, exc): - print("Exception raised:" + _ustr(exc)) + print(f"Exception raised:{_ustr(exc)}") def nullDebugAction(*args): """'Do-nothing' debug action, to suppress debugging output during parsing.""" @@ -1317,19 +1288,17 @@ def wrapper(*args): foundArity[0] = True return ret except TypeError: - # re-raise TypeErrors if they did not come from our arity testing if foundArity[0]: raise - else: + try: + tb = sys.exc_info()[-1] + if extract_tb(tb, limit=2)[-1][:2] != pa_call_line_synth: + raise + finally: + try: - tb = sys.exc_info()[-1] - if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth: - raise - finally: - try: - del tb - except NameError: - pass + del tb + except NameError: + pass if limit[0] <= maxargs: limit[0] += 1 @@ -1398,7 +1367,7 @@ def _trim_traceback(cls, tb): return tb def __init__(self, savelist=False): - self.parseAction = list() + self.parseAction = [] self.failAction = None # ~ self.name = "" # don't define self.name, let subclasses try/except upcall self.strRepr = None @@ -1409,7 +1378,7 @@ def __init__(self, savelist=False): self.copyDefaultWhiteChars = True self.mayReturnEmpty = False # used when checking for left-recursion self.keepTabs = False - self.ignoreExprs = list() + self.ignoreExprs = [] self.debug = False self.streamlined = False self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index @@ -1459,7 +1428,7 @@ def setName(self, name): Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) """ self.name = name - self.errmsg = "Expected " + self.name + self.errmsg = f"Expected {self.name}" if __diag__.enable_debug_on_named_expressions: self.setDebug() return self @@ -1653,10 +1622,11 @@ def _parseNoCache(self, instring, loc, doActions=True, callPreParse=True): if self.debugActions[TRY]: self.debugActions[TRY](instring, loc, self) try: - if callPreParse and self.callPreparse: - preloc = self.preParse(instring, loc) - else: - preloc = loc + preloc = ( + self.preParse(instring, loc) + if callPreParse and self.callPreparse + else loc + ) tokensStart = preloc if self.mayIndexError or preloc >= len(instring): try: @@ -1673,10 +1643,11 @@ def _parseNoCache(self, instring, loc, doActions=True, callPreParse=True): self.failAction(instring, tokensStart, self, err) raise else: - if callPreParse and self.callPreparse: - preloc = self.preParse(instring, loc) - else: - preloc = loc + preloc = ( + self.preParse(instring, loc) + if callPreParse and self.callPreparse + else loc + ) tokensStart = preloc if self.mayIndexError or preloc >= len(instring): try: @@ -1724,10 +1695,8 @@ def _parseNoCache(self, instring, loc, doActions=True, callPreParse=True): self.resultsName, asList=self.saveAsList and isinstance(tokens, (ParseResults, list)), modal=self.modalResults) - if debugging: - # ~ print ("Matched", self, "->", retTokens.asList()) - if self.debugActions[MATCH]: - self.debugActions[MATCH](instring, tokensStart, loc, self, retTokens) + if debugging and self.debugActions[MATCH]: + self.debugActions[MATCH](instring, tokensStart, loc, self, retTokens) return loc, retTokens @@ -1948,11 +1917,10 @@ def parseString(self, instring, parseAll=False): except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise - else: -
# catch and re-raise exception from here, clearing out pyparsing internal stack trace - if getattr(exc, '__traceback__', None) is not None: - exc.__traceback__ = self._trim_traceback(exc.__traceback__) - raise exc + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) + raise exc else: return tokens @@ -2024,11 +1992,10 @@ def scanString(self, instring, maxMatches=_MAX_INT, overlap=False): except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise - else: - # catch and re-raise exception from here, clearing out pyparsing internal stack trace - if getattr(exc, '__traceback__', None) is not None: - exc.__traceback__ = self._trim_traceback(exc.__traceback__) - raise exc + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) + raise exc def transformString(self, instring): """ @@ -2072,11 +2039,10 @@ def transformString(self, instring): except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise - else: - # catch and re-raise exception from here, clearing out pyparsing internal stack trace - if getattr(exc, '__traceback__', None) is not None: - exc.__traceback__ = self._trim_traceback(exc.__traceback__) - raise exc + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) + raise exc def searchString(self, instring, maxMatches=_MAX_INT): """ @@ -2104,11 +2070,10 @@ def searchString(self, instring, maxMatches=_MAX_INT): except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise - else: - # catch and re-raise exception from here, clearing out pyparsing internal stack trace - if getattr(exc, '__traceback__', None) is not None: - exc.__traceback__ = self._trim_traceback(exc.__traceback__) - raise exc + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) + raise exc def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): """ @@ -2168,8 +2133,11 @@ def __add__(self, other): if isinstance(other, basestring): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + warnings.warn( + f"Cannot combine element of type {type(other)} with ParserElement", + SyntaxWarning, + stacklevel=2, + ) return None return And([self, other]) @@ -2183,8 +2151,11 @@ def __radd__(self, other): if isinstance(other, basestring): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + warnings.warn( + f"Cannot combine element of type {type(other)} with ParserElement", + SyntaxWarning, + stacklevel=2, + ) return None return other + self @@ -2195,8 +2166,11 @@ def __sub__(self, other): if isinstance(other, basestring): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - 
SyntaxWarning, stacklevel=2) + warnings.warn( + f"Cannot combine element of type {type(other)} with ParserElement", + SyntaxWarning, + stacklevel=2, + ) return None return self + And._ErrorStop() + other @@ -2207,8 +2181,11 @@ def __rsub__(self, other): if isinstance(other, basestring): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + warnings.warn( + f"Cannot combine element of type {type(other)} with ParserElement", + SyntaxWarning, + stacklevel=2, + ) return None return other - self @@ -2247,10 +2224,7 @@ def __mul__(self, other): if isinstance(other[0], int) and other[1] is None: if other[0] == 0: return ZeroOrMore(self) - if other[0] == 1: - return OneOrMore(self) - else: - return self * other[0] + ZeroOrMore(self) + return OneOrMore(self) if other[0] == 1 else self * other[0] + ZeroOrMore(self) elif isinstance(other[0], int) and isinstance(other[1], int): minElements, optElements = other optElements -= minElements @@ -2268,10 +2242,8 @@ def __mul__(self, other): if optElements: def makeOptionalList(n): - if n > 1: - return Optional(self + makeOptionalList(n - 1)) - else: - return Optional(self) + return Optional(self + makeOptionalList(n - 1)) if n > 1 else Optional(self) + if minElements: if minElements == 1: ret = self + makeOptionalList(optElements) @@ -2280,10 +2252,7 @@ def makeOptionalList(n): else: ret = makeOptionalList(optElements) else: - if minElements == 1: - ret = self - else: - ret = And([self] * minElements) + ret = self if minElements == 1 else And([self] * minElements) return ret def __rmul__(self, other): @@ -2299,8 +2268,11 @@ def __or__(self, other): if isinstance(other, basestring): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + warnings.warn( + f"Cannot combine element of type {type(other)} with ParserElement", + SyntaxWarning, + stacklevel=2, + ) return None return MatchFirst([self, other]) @@ -2311,8 +2283,11 @@ def __ror__(self, other): if isinstance(other, basestring): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + warnings.warn( + f"Cannot combine element of type {type(other)} with ParserElement", + SyntaxWarning, + stacklevel=2, + ) return None return other | self @@ -2323,8 +2298,11 @@ def __xor__(self, other): if isinstance(other, basestring): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + warnings.warn( + f"Cannot combine element of type {type(other)} with ParserElement", + SyntaxWarning, + stacklevel=2, + ) return None return Or([self, other]) @@ -2335,8 +2313,11 @@ def __rxor__(self, other): if isinstance(other, basestring): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + warnings.warn( + f"Cannot combine element of type {type(other)} with ParserElement", + SyntaxWarning, + stacklevel=2, + ) return None return other ^ self @@ -2347,8 +2328,11 @@ def __and__(self, other): if isinstance(other, basestring): 
other = self._literalStringClass(other) if not isinstance(other, ParserElement): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + warnings.warn( + f"Cannot combine element of type {type(other)} with ParserElement", + SyntaxWarning, + stacklevel=2, + ) return None return Each([self, other]) @@ -2359,8 +2343,11 @@ def __rand__(self, other): if isinstance(other, basestring): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), - SyntaxWarning, stacklevel=2) + warnings.warn( + f"Cannot combine element of type {type(other)} with ParserElement", + SyntaxWarning, + stacklevel=2, + ) return None return other & self @@ -2407,9 +2394,7 @@ def __getitem__(self, key): '... [{0}]'.format(len(key)) if len(key) > 5 else '')) - # clip to 2 elements - ret = self * tuple(key[:2]) - return ret + return self * tuple(key[:2]) def __call__(self, name=None): """ @@ -2426,10 +2411,7 @@ def __call__(self, name=None): userdata = Word(alphas).setResultsName("name") + Word(nums + "-").setResultsName("socsecno") userdata = Word(alphas)("name") + Word(nums + "-")("socsecno") """ - if name is not None: - return self._setResultsName(name) - else: - return self.copy() + return self._setResultsName(name) if name is not None else self.copy() def suppress(self): """ @@ -2578,11 +2560,10 @@ def parseFile(self, file_or_filename, parseAll=False): except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise - else: - # catch and re-raise exception from here, clearing out pyparsing internal stack trace - if getattr(exc, '__traceback__', None) is not None: - exc.__traceback__ = self._trim_traceback(exc.__traceback__) - raise exc + # catch and re-raise exception from here, clearing out pyparsing internal stack trace + if getattr(exc, '__traceback__', None) is not None: + exc.__traceback__ = self._trim_traceback(exc.__traceback__) + raise exc def __eq__(self, other): if self is other: @@ -2603,7 +2584,7 @@ def __req__(self, other): return self == other def __rne__(self, other): - return not (self == other) + return self != other def matches(self, testString, parseAll=True): """ @@ -2753,11 +2734,11 @@ def runTests(self, tests, parseAll=True, comment='#', out.append(' ' * (col(pe.loc, t) - 1) + '^' + fatal) else: out.append(' ' * pe.loc + '^' + fatal) - out.append("FAIL: " + str(pe)) + out.append(f"FAIL: {str(pe)}") success = success and failureTests result = pe except Exception as exc: - out.append("FAIL-EXCEPTION: " + str(exc)) + out.append(f"FAIL-EXCEPTION: {str(exc)}") success = success and failureTests result = exc else: @@ -2765,16 +2746,21 @@ def runTests(self, tests, parseAll=True, comment='#', if postParse is not None: try: pp_value = postParse(t, result) - if pp_value is not None: - if isinstance(pp_value, ParseResults): - out.append(pp_value.dump()) - else: - out.append(str(pp_value)) - else: + if pp_value is None: out.append(result.dump()) + elif isinstance(pp_value, ParseResults): + out.append(pp_value.dump()) + else: + out.append(str(pp_value)) except Exception as e: - out.append(result.dump(full=fullDump)) - out.append("{0} failed: {1}: {2}".format(postParse.__name__, type(e).__name__, e)) + out.extend( + ( + result.dump(full=fullDump), + "{0} failed: {1}: {2}".format( + postParse.__name__, type(e).__name__, e + ), + ) + ) else: out.append(result.dump(full=fullDump)) @@ -2805,10 +2791,12 @@ def must_skip(t): if not 
t._skipped or t._skipped.asList() == ['']: del t[0] t.pop("_skipped", None) + def show_skip(t): if t._skipped.asList()[-1:] == ['']: skipped = t.pop('_skipped') - t['_skipped'] = 'missing <' + repr(self.anchor) + '>' + t['_skipped'] = f'missing <{repr(self.anchor)}>' + return (self.anchor + skipper().addParseAction(must_skip) | skipper().addParseAction(show_skip)) + other @@ -2877,8 +2865,8 @@ def __init__(self, matchString): warnings.warn("null string passed to Literal; use Empty() instead", SyntaxWarning, stacklevel=2) self.__class__ = Empty - self.name = '"%s"' % _ustr(self.match) - self.errmsg = "Expected " + self.name + self.name = f'"{_ustr(self.match)}"' + self.errmsg = f"Expected {self.name}" self.mayReturnEmpty = False self.mayIndexError = False @@ -2939,8 +2927,8 @@ def __init__(self, matchString, identChars=None, caseless=False): except IndexError: warnings.warn("null string passed to Keyword; use Empty() instead", SyntaxWarning, stacklevel=2) - self.name = '"%s"' % self.match - self.errmsg = "Expected " + self.name + self.name = f'"{self.match}"' + self.errmsg = f"Expected {self.name}" self.mayReturnEmpty = False self.mayIndexError = False self.caseless = caseless @@ -2958,13 +2946,15 @@ def parseImpl(self, instring, loc, doActions=True): or instring[loc - 1].upper() not in self.identChars)): return loc + self.matchLen, self.match - else: - if instring[loc] == self.firstMatchChar: - if ((self.matchLen == 1 or instring.startswith(self.match, loc)) - and (loc >= len(instring) - self.matchLen - or instring[loc + self.matchLen] not in self.identChars) - and (loc == 0 or instring[loc - 1] not in self.identChars)): - return loc + self.matchLen, self.match + elif instring[loc] == self.firstMatchChar and ( + (self.matchLen == 1 or instring.startswith(self.match, loc)) + and ( + loc >= len(instring) - self.matchLen + or instring[loc + self.matchLen] not in self.identChars + ) + and (loc == 0 or instring[loc - 1] not in self.identChars) + ): + return loc + self.matchLen, self.match raise ParseException(instring, loc, self.errmsg, self) @@ -2994,8 +2984,8 @@ def __init__(self, matchString): super(CaselessLiteral, self).__init__(matchString.upper()) # Preserve the defining literal. 
self.returnString = matchString - self.name = "'%s'" % self.returnString - self.errmsg = "Expected " + self.name + self.name = f"'{self.returnString}'" + self.errmsg = f"Expected {self.name}" def parseImpl(self, instring, loc, doActions=True): if instring[loc:loc + self.matchLen].upper() == self.match: @@ -3160,29 +3150,23 @@ def __init__(self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=F self.minLen = min - if max > 0: - self.maxLen = max - else: - self.maxLen = _MAX_INT - + self.maxLen = max if max > 0 else _MAX_INT if exact > 0: self.maxLen = exact self.minLen = exact self.name = _ustr(self) - self.errmsg = "Expected " + self.name + self.errmsg = f"Expected {self.name}" self.mayIndexError = False self.asKeyword = asKeyword if ' ' not in self.initCharsOrig + self.bodyCharsOrig and (min == 1 and max == 0 and exact == 0): if self.bodyCharsOrig == self.initCharsOrig: - self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) + self.reString = f"[{_escapeRegexRangeChars(self.initCharsOrig)}]+" elif len(self.initCharsOrig) == 1: - self.reString = "%s[%s]*" % (re.escape(self.initCharsOrig), - _escapeRegexRangeChars(self.bodyCharsOrig),) + self.reString = f"{re.escape(self.initCharsOrig)}[{_escapeRegexRangeChars(self.bodyCharsOrig)}]*" else: - self.reString = "[%s][%s]*" % (_escapeRegexRangeChars(self.initCharsOrig), - _escapeRegexRangeChars(self.bodyCharsOrig),) + self.reString = f"[{_escapeRegexRangeChars(self.initCharsOrig)}][{_escapeRegexRangeChars(self.bodyCharsOrig)}]*" if self.asKeyword: self.reString = r"\b" + self.reString + r"\b" @@ -3231,15 +3215,12 @@ def __str__(self): if self.strRepr is None: def charsAsStr(s): - if len(s) > 4: - return s[:4] + "..." - else: - return s + return f"{s[:4]}..." if len(s) > 4 else s if self.initCharsOrig != self.bodyCharsOrig: - self.strRepr = "W:(%s, %s)" % (charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig)) + self.strRepr = f"W:({charsAsStr(self.initCharsOrig)}, {charsAsStr(self.bodyCharsOrig)})" else: - self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) + self.strRepr = f"W:({charsAsStr(self.initCharsOrig)})" return self.strRepr @@ -3260,7 +3241,7 @@ class Char(_WordRegex): """ def __init__(self, charset, asKeyword=False, excludeChars=None): super(Char, self).__init__(charset, exact=1, asKeyword=asKeyword, excludeChars=excludeChars) - self.reString = "[%s]" % _escapeRegexRangeChars(''.join(self.initChars)) + self.reString = f"[{_escapeRegexRangeChars(''.join(self.initChars))}]" if asKeyword: self.reString = r"\b%s\b" % self.reString self.re = re.compile(self.reString) @@ -3311,8 +3292,11 @@ def __init__(self, pattern, flags=0, asGroupList=False, asMatch=False): self.re = re.compile(self.pattern, self.flags) self.reString = self.pattern except sre_constants.error: - warnings.warn("invalid pattern (%s) passed to Regex" % pattern, - SyntaxWarning, stacklevel=2) + warnings.warn( + f"invalid pattern ({pattern}) passed to Regex", + SyntaxWarning, + stacklevel=2, + ) raise elif hasattr(pattern, 'pattern') and hasattr(pattern, 'match'): @@ -3326,7 +3310,7 @@ def __init__(self, pattern, flags=0, asGroupList=False, asMatch=False): self.re_match = self.re.match self.name = _ustr(self) - self.errmsg = "Expected " + self.name + self.errmsg = f"Expected {self.name}" self.mayIndexError = False self.mayReturnEmpty = self.re_match("") is not None self.asGroupList = asGroupList @@ -3343,8 +3327,7 @@ def parseImpl(self, instring, loc, doActions=True): loc = result.end() ret = ParseResults(result.group()) - d = 
result.groupdict() - if d: + if d := result.groupdict(): for k, v in d.items(): ret[k] = v return loc, ret diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/retrying.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/retrying.py index f8d743b..88f02ac 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/retrying.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/retrying.py @@ -164,8 +164,7 @@ def incrementing_sleep(self, previous_attempt_number, delay_since_first_attempt_ wait_incrementing_start and incrementing by wait_incrementing_increment """ result = self._wait_incrementing_start + (self._wait_incrementing_increment * (previous_attempt_number - 1)) - if result < 0: - result = 0 + result = max(result, 0) return result def exponential_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): @@ -173,8 +172,7 @@ def exponential_sleep(self, previous_attempt_number, delay_since_first_attempt_m result = self._wait_exponential_multiplier * exp if result > self._wait_exponential_max: result = self._wait_exponential_max - if result < 0: - result = 0 + result = max(result, 0) return result def never_reject(self, result): @@ -240,13 +238,12 @@ def get(self, wrap_exception=False): If wrap_exception is true, this Attempt is wrapped inside of a RetryError before being raised. """ - if self.has_exception: - if wrap_exception: - raise RetryError(self) - else: - six.reraise(self.value[0], self.value[1], self.value[2]) - else: + if not self.has_exception: return self.value + if wrap_exception: + raise RetryError(self) + else: + six.reraise(self.value[0], self.value[1], self.value[2]) def __repr__(self): if self.has_exception: diff --git a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/six.py b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/six.py index 19ceb0d..9083296 100644 --- a/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/six.py +++ b/template/python-waitress/venv/lib/python3.6/site-packages/pip/_vendor/six.py @@ -20,6 +20,7 @@ """Utilities for writing code that runs on Python 2 and 3""" + from __future__ import absolute_import import functools @@ -35,7 +36,7 @@ # Useful for very coarse version differentiation. PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 -PY34 = sys.version_info[0:2] >= (3, 4) +PY34 = sys.version_info[:2] >= (3, 4) if PY3: string_types = str, @@ -145,10 +146,7 @@ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): new_mod = name self.mod = new_mod if new_attr is None: - if old_attr is None: - new_attr = name - else: - new_attr = old_attr + new_attr = name if old_attr is None else old_attr self.attr = new_attr else: self.mod = old_mod @@ -176,21 +174,19 @@ def __init__(self, six_module_name): def _add_module(self, mod, *fullnames): for fullname in fullnames: - self.known_modules[self.name + "." + fullname] = mod + self.known_modules[f"{self.name}.{fullname}"] = mod def _get_module(self, fullname): - return self.known_modules[self.name + "." 
+ fullname] + return self.known_modules[f"{self.name}.{fullname}"] def find_module(self, fullname, path=None): - if fullname in self.known_modules: - return self - return None + return self if fullname in self.known_modules else None def __get_module(self, fullname): try: return self.known_modules[fullname] except KeyError: - raise ImportError("This loader does not know module " + fullname) + raise ImportError(f"This loader does not know module {fullname}") def load_module(self, fullname): try: @@ -235,7 +231,9 @@ class _MovedItems(_LazyModule): _moved_attributes = [ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute( + "filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse" + ), MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), MovedAttribute("intern", "__builtin__", "sys"), MovedAttribute("map", "itertools", "builtins", "imap", "map"), @@ -243,7 +241,12 @@ class _MovedItems(_LazyModule): MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), MovedAttribute("getoutput", "commands", "subprocess"), MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute( + "reload_module", + "__builtin__", + "importlib" if PY34 else "imp", + "reload", + ), MovedAttribute("reduce", "__builtin__", "functools"), MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), MovedAttribute("StringIO", "StringIO", "io"), @@ -252,14 +255,24 @@ class _MovedItems(_LazyModule): MovedAttribute("UserString", "UserString", "collections"), MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedAttribute( + "zip_longest", "itertools", "itertools", "izip_longest", "zip_longest" + ), MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), - MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), + MovedModule( + "collections_abc", + "collections", + "collections.abc" if sys.version_info >= (3, 3) else "collections", + ), MovedModule("copyreg", "copy_reg"), MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"), + MovedModule( + "_dummy_thread", + "dummy_thread", + "_dummy_thread" if sys.version_info < (3, 9) else "_thread", + ), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookies", "Cookie", "http.cookies"), MovedModule("html_entities", "htmlentitydefs", "html.entities"), @@ -267,8 +280,14 @@ class _MovedItems(_LazyModule): MovedModule("http_client", "httplib", "http.client"), MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), - MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule( + "email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart" + ), + MovedModule( + "email_mime_nonmultipart", + 
"email.MIMENonMultipart", + "email.mime.nonmultipart", + ), MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), @@ -281,24 +300,37 @@ class _MovedItems(_LazyModule): MovedModule("tkinter", "Tkinter"), MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), - MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), - MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule( + "tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext" + ), + MovedModule( + "tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog" + ), MovedModule("tkinter_tix", "Tix", "tkinter.tix"), MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), + MovedModule( + "tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser" + ), + MovedModule( + "tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog" + ), MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), MovedModule("tkinter_font", "tkFont", "tkinter.font"), MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), - MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), - MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), - MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule( + "tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog" + ), + MovedModule( + "urllib_parse", f"{__name__}.moves.urllib_parse", "urllib.parse" + ), + MovedModule( + "urllib_error", f"{__name__}.moves.urllib_error", "urllib.error" + ), + MovedModule( + "urllib", f"{__name__}.moves.urllib", f"{__name__}.moves.urllib" + ), MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), @@ -312,20 +344,18 @@ class _MovedItems(_LazyModule): for attr in _moved_attributes: setattr(_MovedItems, attr.name, attr) if isinstance(attr, MovedModule): - _importer._add_module(attr, "moves." 
+ attr.name) + _importer._add_module(attr, f"moves.{attr.name}") del attr _MovedItems._moved_attributes = _moved_attributes -moves = _MovedItems(__name__ + ".moves") +moves = _MovedItems(f"{__name__}.moves") _importer._add_module(moves, "moves") class Module_six_moves_urllib_parse(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_parse""" - - _urllib_parse_moved_attributes = [ MovedAttribute("ParseResult", "urlparse", "urllib.parse"), MovedAttribute("SplitResult", "urlparse", "urllib.parse"), @@ -359,15 +389,16 @@ class Module_six_moves_urllib_parse(_LazyModule): Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - "moves.urllib_parse", "moves.urllib.parse") +_importer._add_module( + Module_six_moves_urllib_parse(f"{__name__}.moves.urllib_parse"), + "moves.urllib_parse", + "moves.urllib.parse", +) class Module_six_moves_urllib_error(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_error""" - - _urllib_error_moved_attributes = [ MovedAttribute("URLError", "urllib2", "urllib.error"), MovedAttribute("HTTPError", "urllib2", "urllib.error"), @@ -379,15 +410,16 @@ class Module_six_moves_urllib_error(_LazyModule): Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") +_importer._add_module( + Module_six_moves_urllib_error(f"{__name__}.moves.urllib.error"), + "moves.urllib_error", + "moves.urllib.error", +) class Module_six_moves_urllib_request(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_request""" - - _urllib_request_moved_attributes = [ MovedAttribute("urlopen", "urllib2", "urllib.request"), MovedAttribute("install_opener", "urllib2", "urllib.request"), @@ -431,15 +463,16 @@ class Module_six_moves_urllib_request(_LazyModule): Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") +_importer._add_module( + Module_six_moves_urllib_request(f"{__name__}.moves.urllib.request"), + "moves.urllib_request", + "moves.urllib.request", +) class Module_six_moves_urllib_response(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_response""" - - _urllib_response_moved_attributes = [ MovedAttribute("addbase", "urllib", "urllib.response"), MovedAttribute("addclosehook", "urllib", "urllib.response"), @@ -452,15 +485,16 @@ class Module_six_moves_urllib_response(_LazyModule): Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") +_importer._add_module( + Module_six_moves_urllib_response(f"{__name__}.moves.urllib.response"), + "moves.urllib_response", + "moves.urllib.response", +) class Module_six_moves_urllib_robotparser(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_robotparser""" - - _urllib_robotparser_moved_attributes = [ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), ] @@ -470,8 +504,13 @@ class Module_six_moves_urllib_robotparser(_LazyModule): Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes 
-_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - "moves.urllib_robotparser", "moves.urllib.robotparser") +_importer._add_module( + Module_six_moves_urllib_robotparser( + f"{__name__}.moves.urllib.robotparser" + ), + "moves.urllib_robotparser", + "moves.urllib.robotparser", +) class Module_six_moves_urllib(types.ModuleType): @@ -487,8 +526,9 @@ class Module_six_moves_urllib(types.ModuleType): def __dir__(self): return ['parse', 'error', 'request', 'response', 'robotparser'] -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") +_importer._add_module( + Module_six_moves_urllib(f"{__name__}.moves.urllib"), "moves.urllib" +) def add_move(move): @@ -894,7 +934,7 @@ def ensure_binary(s, encoding='utf-8', errors='strict'): return s if isinstance(s, text_type): return s.encode(encoding, errors) - raise TypeError("not expecting type '%s'" % type(s)) + raise TypeError(f"not expecting type '{type(s)}'") def ensure_str(s, encoding='utf-8', errors='strict'): @@ -916,7 +956,7 @@ def ensure_str(s, encoding='utf-8', errors='strict'): elif PY3 and isinstance(s, binary_type): return s.decode(encoding, errors) elif not isinstance(s, (text_type, binary_type)): - raise TypeError("not expecting type '%s'" % type(s)) + raise TypeError(f"not expecting type '{type(s)}'") return s @@ -936,7 +976,7 @@ def ensure_text(s, encoding='utf-8', errors='strict'): elif isinstance(s, text_type): return s else: - raise TypeError("not expecting type '%s'" % type(s)) + raise TypeError(f"not expecting type '{type(s)}'") def python_2_unicode_compatible(klass):