diff --git a/argusclient/client.py b/argusclient/client.py
index 92e94a9..3c21370 100644
--- a/argusclient/client.py
+++ b/argusclient/client.py
@@ -695,6 +695,9 @@ def with_auth_token(*args, **kwargs):
         except ArgusAuthException:
             if argus.password:
                 logging.debug("Token refresh failed, will attempt a fresh login", exc_info=True)
+            elif argus.falconEnv:
+                logging.debug("Access token and refresh token expired, prompting for new tokens")
+                argus.accessToken, argus.refreshToken = promptForTokens()
             else:
                 raise
         if not argus.accessToken and argus.password:
@@ -712,6 +715,18 @@ def with_auth_token(*args, **kwargs):
     return with_auth_token
 
 
+# Reads a new access token and refresh token from user-supplied files; used for Mac access to the Falcon environment.
+def promptForTokens():
+    # Prompt for the access token
+    filepath = raw_input("Provide the path to the file containing your new access token: ")
+    with open(filepath, 'r') as f:
+        access_token = f.read()
+
+    # Prompt for the refresh token
+    filepath = raw_input("Provide the path to the file containing your new refresh token: ")
+    with open(filepath, 'r') as f:
+        refresh_token = f.read()
+    return access_token.rstrip("\n"), refresh_token.rstrip("\n")
 class ArgusServiceClient(object):
     """
 
@@ -763,7 +778,7 @@ class ArgusServiceClient(object):
 
     """
 
-    def __init__(self, user, password, endpoint, timeout=(10, 120), refreshToken=None, accessToken=None):
+    def __init__(self, user, password, endpoint, timeout=(10, 120), refreshToken=None, accessToken=None, falconEnv=False):
         """
         Creates a new client object to interface with the Argus RESTful API.
 
@@ -779,6 +794,8 @@ def __init__(self, user, password, endpoint, timeout=(10, 120), refreshToken=None, accessToken=None):
         :type refreshToken: str
         :param accessToken: A token that can be used to authenticate with Argus. If a ``refreshToken`` or ``password`` is specified, the ``accessToken`` will be refreshed as and when it is needed.
         :type refreshToken: str
+        :param falconEnv: Whether the client targets the Falcon environment; if ``True``, new tokens are read from user-supplied files when both the access and refresh tokens have expired.
+        :type falconEnv: bool
         """
         if not user:
             raise ValueError("A valid user must be specified")
@@ -788,6 +805,7 @@ def __init__(self, user, password, endpoint, timeout=(10, 120), refreshToken=None, accessToken=None):
             raise ValueError("Need a valid Argus endpoint URL")
 
         self.user = user
+        self.falconEnv = falconEnv
         self.password = password
         self.endpoint = endpoint
         self.timeout = timeout
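
Reviewer note: below is a minimal sketch of how the new falconEnv flag is meant to be exercised. The user name, endpoint URL, and token-file paths are hypothetical placeholders, and the comments paraphrase the decorator logic above; treat this as an illustration of the intended flow, not a tested invocation.

    # Sketch only -- all concrete values below are hypothetical placeholders.
    from argusclient import ArgusServiceClient

    argus = ArgusServiceClient(user="jdoe",
                               password=None,  # no password-based login for the Mac/Falcon use case
                               endpoint="https://argus.example.com/argusws",
                               refreshToken=open("/tmp/refresh.tok").read().rstrip("\n"),
                               accessToken=open("/tmp/access.tok").read().rstrip("\n"),
                               falconEnv=True)
    argus.login()

    # Subsequent API calls run through the with_auth_token decorator. If the
    # access token has expired and the refresh attempt raises
    # ArgusAuthException, the falconEnv branch calls promptForTokens(), which
    # reads from stdin the paths of two files holding freshly acquired tokens:
    #
    #   Provide the path to the file containing your new access token: /tmp/access.tok
    #   Provide the path to the file containing your new refresh token: /tmp/refresh.tok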
diff --git a/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/INSTALLER b/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/LICENSE b/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/LICENSE new file mode 100644 index 0000000..13d1c74 --- /dev/null +++ b/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/LICENSE @@ -0,0 +1,25 @@ +Copyright (c) 2006-2019 by the respective authors (see AUTHORS file). +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +* Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/METADATA b/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/METADATA new file mode 100644 index 0000000..4214b2f --- /dev/null +++ b/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/METADATA @@ -0,0 +1,50 @@ +Metadata-Version: 2.1 +Name: Pygments +Version: 2.5.2 +Summary: Pygments is a syntax highlighting package written in Python. 
+Home-page: http://pygments.org/ +Author: Georg Brandl +Author-email: georg@python.org +License: BSD License +Keywords: syntax highlighting +Platform: any +Classifier: License :: OSI Approved :: BSD License +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: End Users/Desktop +Classifier: Intended Audience :: System Administrators +Classifier: Development Status :: 6 - Mature +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Operating System :: OS Independent +Classifier: Topic :: Text Processing :: Filters +Classifier: Topic :: Utilities +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* + + +Pygments +~~~~~~~~ + +Pygments is a syntax highlighting package written in Python. + +It is a generic syntax highlighter suitable for use in code hosting, forums, +wikis or other applications that need to prettify source code. Highlights +are: + +* a wide range of over 300 languages and other text formats is supported +* special attention is paid to details, increasing quality by a fair amount +* support for new languages and formats are added easily +* a number of output formats, presently HTML, LaTeX, RTF, SVG, all image formats that PIL supports and ANSI sequences +* it is usable as a command-line tool and as a library + +:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. +:license: BSD, see LICENSE for details. 
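+
+A minimal library-use sketch of the API described above (a sketch, not part
+of the packaged METADATA; the lexer and formatter names are stock Pygments
+classes)::
+
+    from pygments import highlight
+    from pygments.lexers import PythonLexer
+    from pygments.formatters import HtmlFormatter
+
+    code = 'print("Hello, world!")'
+    # highlight() returns the formatted output as a string (HTML here)
+    print(highlight(code, PythonLexer(), HtmlFormatter()))
+
+The same output is available from the command line via the ``pygmentize``
+script registered in ``entry_points.txt``.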
+ + diff --git a/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/RECORD b/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/RECORD new file mode 100644 index 0000000..2f5d11c --- /dev/null +++ b/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/RECORD @@ -0,0 +1,451 @@ +../../../bin/pygmentize,sha256=SizFkARASmvo9jzLkXTZchae-1KMLVuK07KG1Mn2wGY,259 +Pygments-2.5.2.dist-info/AUTHORS,sha256=TF7aH1YpYbhTCI2skOIlbY17LFDFhFe72J0j4s0-A90,8343 +Pygments-2.5.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Pygments-2.5.2.dist-info/LICENSE,sha256=RbiNNEnDeAZZR1i_PEhNnZixKx7MFj9lQx_gf-pgJfA,1331 +Pygments-2.5.2.dist-info/METADATA,sha256=HuyzWPJj16jDq4zqQNVwMkAhLB5d79yjk4Kh2e_WiK8,1975 +Pygments-2.5.2.dist-info/RECORD,, +Pygments-2.5.2.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110 +Pygments-2.5.2.dist-info/entry_points.txt,sha256=NXt9BRDRv6tAfDwqKM0bDHrrxaIt2f1nxH9CwjyjSKc,54 +Pygments-2.5.2.dist-info/top_level.txt,sha256=RjKKqrVIStoebLHdbs0yZ2Lk4rS7cxGguXsLCYvZ2Ak,9 +pygments/__init__.py,sha256=ClnXejMf3_glSTxF1L99Md0Mpf9365Sgrjsq7h3X4ZQ,3163 +pygments/__init__.pyc,, +pygments/__main__.py,sha256=JV6RSKzbYgMQHLf0nZGzfq1IXxns2iGunsfkY3jxFKo,372 +pygments/__main__.pyc,, +pygments/cmdline.py,sha256=Sr5RQ7uaxZe7gVGgOgypYUAF5xEcGh2Vvb3HYIX-7UQ,19525 +pygments/cmdline.pyc,, +pygments/console.py,sha256=QF0bQHbGeFRSetc3g5JsmGziVHQqIZCprEwNlZFtiRg,1721 +pygments/console.pyc,, +pygments/filter.py,sha256=83wVChi33J4gW5GV-ooZSHoXWoqzInGBA74DhW239lE,2038 +pygments/filter.pyc,, +pygments/filters/__init__.py,sha256=YcsNTHkr2izYgwenZeUKegrjMzFNzF9V-FGDmwtjSV4,11573 +pygments/filters/__init__.pyc,, +pygments/formatter.py,sha256=9ceInaU6dqybrzludvZZoqNEBmBvofYVFa63hFRUrJI,2948 +pygments/formatter.pyc,, +pygments/formatters/__init__.py,sha256=qUkhNBzgq_1RmRc5v4y22DqDPsMoJuo91rSsr5cV2FM,5128 +pygments/formatters/__init__.pyc,, +pygments/formatters/_mapping.py,sha256=eFk9dOhQeV3W3bvC-8hQomqu0yd_txUFxI-jysNTlMM,6214 +pygments/formatters/_mapping.pyc,, +pygments/formatters/bbcode.py,sha256=_K7UzwyT70snOYAiT3UkItbXRwQYVuTHpr1AZtRHL6Y,3314 +pygments/formatters/bbcode.pyc,, +pygments/formatters/html.py,sha256=f0WJ1uuFLF0Dp2YalcMN4aq199nCfYnexiFip7FmQVQ,32687 +pygments/formatters/html.pyc,, +pygments/formatters/img.py,sha256=2j8uM6BsmF-8nfbNAUZR_Xa2PlVVPEacVSdtVL3VVu0,19838 +pygments/formatters/img.pyc,, +pygments/formatters/irc.py,sha256=nU9jSjARuRaZpCuCey7bnRwGTGKeCTEhm_yDDYxzKQ8,5869 +pygments/formatters/irc.pyc,, +pygments/formatters/latex.py,sha256=GzaZW3aQLWIyx4AFh_9l9UjaTGCL_SXk-KbTKXaS0jI,17758 +pygments/formatters/latex.pyc,, +pygments/formatters/other.py,sha256=Qfc5OixOxM7YEy0d0NJBT750ukj-uPyhxKtHGTm0Vlc,5140 +pygments/formatters/other.pyc,, +pygments/formatters/rtf.py,sha256=z8LTTuEXwx3hpLaG0qeJumZCkUfseLIBsxhZE-0tEKg,5050 +pygments/formatters/rtf.pyc,, +pygments/formatters/svg.py,sha256=TIKW5KJVOtjY1CO4Y9Kqid9w2IFpfkVR3JD1mEujRJY,5840 +pygments/formatters/svg.pyc,, +pygments/formatters/terminal.py,sha256=XcyogF_qynLMFC10GvUen0tH9AH0zR8q85xil0rlpb0,4997 +pygments/formatters/terminal.pyc,, +pygments/formatters/terminal256.py,sha256=1d9m96FPG0xEaXEqX3j0ABGWUpEjZKxhVGLWk-8sm7o,11068 +pygments/formatters/terminal256.pyc,, +pygments/lexer.py,sha256=NG6uRD_EjEdHzZh0IdzC2taAZtIV_PunZwtajy1q8DE,31698 +pygments/lexer.pyc,, +pygments/lexers/__init__.py,sha256=4b4C0upAO06vSKrssOmTTiIsqSTKJM4RbjprytoNjaA,11385 +pygments/lexers/__init__.pyc,, +pygments/lexers/_asy_builtins.py,sha256=zO_y8v-bp6kjlIwvbmse79qY8P7qAUhoVObaX9Qy3S8,27311 
+pygments/lexers/_asy_builtins.pyc,, +pygments/lexers/_cl_builtins.py,sha256=x-mRhM6ukZv0pxYtqCq7SlsezhL8L9fpcCQ-gou0Z9w,14018 +pygments/lexers/_cl_builtins.pyc,, +pygments/lexers/_cocoa_builtins.py,sha256=xa2yIFoOcQmYuv014JnBMYlHIrEnD1FMWjsYemJyVm0,40001 +pygments/lexers/_cocoa_builtins.pyc,, +pygments/lexers/_csound_builtins.py,sha256=dWlaJZAK66OZO_gsFNJOad9jWF3dp8HWiskIK5jaEag,17504 +pygments/lexers/_csound_builtins.pyc,, +pygments/lexers/_lasso_builtins.py,sha256=1jR-3eDhf1CUcPSSEXgbJMymAkQaJqpWIPjYM4rL6Sk,134534 +pygments/lexers/_lasso_builtins.pyc,, +pygments/lexers/_lua_builtins.py,sha256=PrFIbnBC7CfaRzIjcTEZvl0gsNpbrzE92e1gSNV2YbU,8340 +pygments/lexers/_lua_builtins.pyc,, +pygments/lexers/_mapping.py,sha256=aXLeu68ne_gAAboufuMxiqYb1Y07cWQOl22_3HXD6tU,57717 +pygments/lexers/_mapping.pyc,, +pygments/lexers/_mql_builtins.py,sha256=MS7566jpdiud7gEa_y4iJpHLkqjpo-7Y8WwB9MyMUhY,24737 +pygments/lexers/_mql_builtins.pyc,, +pygments/lexers/_openedge_builtins.py,sha256=hCqbIZd_qcBTlLyQGME8mqijUDCIm5P9HtIsv8JCEG8,48362 +pygments/lexers/_openedge_builtins.pyc,, +pygments/lexers/_php_builtins.py,sha256=uH5hykXbyM4d6dXqs78WXEYkKwVgEwIwwOP3HXIb5Yw,154429 +pygments/lexers/_php_builtins.pyc,, +pygments/lexers/_postgres_builtins.py,sha256=OI0j7i72gKoNGJomATjK_P00D7cVT6bpPqeeSB4k0aM,11210 +pygments/lexers/_postgres_builtins.pyc,, +pygments/lexers/_scilab_builtins.py,sha256=5gjuC1Ny-kJzHi1ng-TI4TJKVuIut1oj4J3f2bPdLIk,52405 +pygments/lexers/_scilab_builtins.pyc,, +pygments/lexers/_sourcemod_builtins.py,sha256=h79SYG76SHCsSk7qXI7ThqIEpHfg04-eQ2y9z4sqCDk,27113 +pygments/lexers/_sourcemod_builtins.pyc,, +pygments/lexers/_stan_builtins.py,sha256=BfSr_PiG5QE0-7hUfX4g_jdwugKf1zWtGE2w33FotvA,10481 +pygments/lexers/_stan_builtins.pyc,, +pygments/lexers/_stata_builtins.py,sha256=rZ8lopR_vKuDCBeCF9oPf71sHkD6n-tN6T5QpyOVEg4,25228 +pygments/lexers/_stata_builtins.pyc,, +pygments/lexers/_tsql_builtins.py,sha256=5qrkZJHk_m1SgTnhCrKp5jXJxexjCaf4GtRkY5_PTEA,15484 +pygments/lexers/_tsql_builtins.pyc,, +pygments/lexers/_vbscript_builtins.py,sha256=chotaveFeFC-A6qcRAghQC7fAwrDmV-BKE_TW-hrZwk,4249 +pygments/lexers/_vbscript_builtins.pyc,, +pygments/lexers/_vim_builtins.py,sha256=Il_pjrP0PWUjMLCRPOZPoLgd_3jauvv9SGtBOkzmU2A,57090 +pygments/lexers/_vim_builtins.pyc,, +pygments/lexers/actionscript.py,sha256=jQTpfKe0OwRQTknMs132_WhqEDIW7lQbLW0HU5D0cOs,11181 +pygments/lexers/actionscript.pyc,, +pygments/lexers/agile.py,sha256=0yI_Bq_-ekqFCiMzkcnJfNQ12iyA4QmPk70RCfl1Xa0,900 +pygments/lexers/agile.pyc,, +pygments/lexers/algebra.py,sha256=vMjSoC9CgSWUMoaNu7gysQDdAc46t_Y6U4dX2mEzNCc,7201 +pygments/lexers/algebra.pyc,, +pygments/lexers/ambient.py,sha256=1_B2JkmFVgGq-JuEhmrXIu-q5WP2e7Ir5DSpO7qXN9E,2557 +pygments/lexers/ambient.pyc,, +pygments/lexers/ampl.py,sha256=HWeNZxYsNhPuGmW1lgNUxMe5zMtbMQ-xNFoj9oVOvq8,4123 +pygments/lexers/ampl.pyc,, +pygments/lexers/apl.py,sha256=gzIuS7p2Qz-pN5M0i45uvDow_gsNNus5k6zrwe19M9c,3174 +pygments/lexers/apl.pyc,, +pygments/lexers/archetype.py,sha256=luJBCChBsH6fdJOboz5pVTSNCHh7miLd1xtLnI7TH88,11136 +pygments/lexers/archetype.pyc,, +pygments/lexers/asm.py,sha256=kK84-in8Yb41iSVc8pBfTD7_43a0qfLth0gHh4WR5Dg,29855 +pygments/lexers/asm.pyc,, +pygments/lexers/automation.py,sha256=9oR495kiyEbl-ev7PWF4Mw-jvtuSbOkmKJRmOvUzQb8,19640 +pygments/lexers/automation.pyc,, +pygments/lexers/basic.py,sha256=siXk3fQfTEfJNeSW2sI-rfssoUpyj7drMdMrs5csYrs,27576 +pygments/lexers/basic.pyc,, +pygments/lexers/bibtex.py,sha256=fxbIyhfV1yrFfd7oyAp-fyss27T0Bfv8VqRdVnLg63Y,4725 +pygments/lexers/bibtex.pyc,, 
+pygments/lexers/boa.py,sha256=OB_W242mpr2vwbhg0MO4BpZcdhjaXuM6ffQ54zn3-ZI,3942 +pygments/lexers/boa.pyc,, +pygments/lexers/business.py,sha256=VXved88PH_Lg05WCN_Mb2bxXclZX6IYdE3KXZVXtUZI,27665 +pygments/lexers/business.pyc,, +pygments/lexers/c_cpp.py,sha256=6K3X-vogKt3KURQdKT_1X64CvdAn6vFJQvFVYfD9MMg,10512 +pygments/lexers/c_cpp.pyc,, +pygments/lexers/c_like.py,sha256=UusXq2S5d0v0CpGsxkmVludmu58WsLZQHWnsb0YwhK4,25080 +pygments/lexers/c_like.pyc,, +pygments/lexers/capnproto.py,sha256=pC3zXFSfYFHEIBq3OqLPGKl71K5HtdWnAEqMz6n8KFY,2194 +pygments/lexers/capnproto.pyc,, +pygments/lexers/chapel.py,sha256=VBTixkCdwOebIKTdW0oxTAlS2zE99EYUGN5hiCdnGeA,3824 +pygments/lexers/chapel.pyc,, +pygments/lexers/clean.py,sha256=XG0_2KVyxbRFp-_U5HgT1wN9srL522kOe_9T51HeQmA,6362 +pygments/lexers/clean.pyc,, +pygments/lexers/compiled.py,sha256=iGwVkCJ-SXoovHegOBSnOG518hHkDudegb9_qS-8vW0,1385 +pygments/lexers/compiled.pyc,, +pygments/lexers/configs.py,sha256=v2YS0XcHucKi1yE1S1O5-yIOj3Oy-sX3Hqp0DhKX-6o,32135 +pygments/lexers/configs.pyc,, +pygments/lexers/console.py,sha256=tj_ARAplXlqt2sGb2ycxsOy8xIL4NCAMOd3bZ0Zjojg,4120 +pygments/lexers/console.pyc,, +pygments/lexers/crystal.py,sha256=hTz20yWrjuam9JVG9Xxr6I7x50M_sIlfdBs0_gg5hKQ,16845 +pygments/lexers/crystal.pyc,, +pygments/lexers/csound.py,sha256=G4pXcEj6n1y49HMuB-XSljtzOf2zv2QyuQuXDqmVeTo,16739 +pygments/lexers/csound.pyc,, +pygments/lexers/css.py,sha256=GFWC8OBDKDkICn2bRXH2ZmpExGFwV3Hc5jfxEQELyeg,31509 +pygments/lexers/css.pyc,, +pygments/lexers/d.py,sha256=E_Gj-5pLeC3V6gUzJteVH7JqZEghad9iup19PT9am4k,9530 +pygments/lexers/d.pyc,, +pygments/lexers/dalvik.py,sha256=tAoPPa_iRXhWG_MzslSvBE99NlGnkx0WKnwdDQ3XU9o,4420 +pygments/lexers/dalvik.pyc,, +pygments/lexers/data.py,sha256=bp8iRknsEocR3BGye9-uNC3YRqh6yXmE0qIw4nnKAMw,19056 +pygments/lexers/data.pyc,, +pygments/lexers/diff.py,sha256=8jKEVtSA2YKprutpONqFvMKBhK1U_IFdxaScTuRNeU4,4873 +pygments/lexers/diff.pyc,, +pygments/lexers/dotnet.py,sha256=Itf0sbtWeooWcI9eBYNtcEXkr0i3qCmWS3GAX_7VSVI,27599 +pygments/lexers/dotnet.pyc,, +pygments/lexers/dsls.py,sha256=wZJNJpyqbmcqGu8pkLKgsYV7RYbnj81e5Zv_5scPqls,35836 +pygments/lexers/dsls.pyc,, +pygments/lexers/dylan.py,sha256=LkWTiLsU561_VQL-PUirryEt7ewbseLRJfN-H1twmiA,10402 +pygments/lexers/dylan.pyc,, +pygments/lexers/ecl.py,sha256=5ivxyk5lzMottCuIxyE7DBvWYJV5KTuaHNRkvOtgM7c,5875 +pygments/lexers/ecl.pyc,, +pygments/lexers/eiffel.py,sha256=He2DwoUqWqMt8_PDzoP3NuBl9AZ9K3_SmpGkIgSzWuI,2482 +pygments/lexers/eiffel.pyc,, +pygments/lexers/elm.py,sha256=91CM_h3PPoBLLm2stJqNZi3lgjhZH7NvzNKWdXAe8CA,2997 +pygments/lexers/elm.pyc,, +pygments/lexers/email.py,sha256=ap9imSi6jbbP7gPBAyc3rcNurVDSNmRKIWv0ByR6VOQ,5207 +pygments/lexers/email.pyc,, +pygments/lexers/erlang.py,sha256=cV4ibgUrR9iiHJFH0nfzTTkseM-F2b71mljP3lE91Uo,18976 +pygments/lexers/erlang.pyc,, +pygments/lexers/esoteric.py,sha256=I7YEPnQdftxEOasCec8_dxVr7zgypMtoYtds0v2srNQ,9489 +pygments/lexers/esoteric.pyc,, +pygments/lexers/ezhil.py,sha256=R26b4iXSpdMkgXewJN2INhJXL0ICXhW2o9fu3bn078U,3020 +pygments/lexers/ezhil.pyc,, +pygments/lexers/factor.py,sha256=nBYhJoNLkSxtshGrF08tSQKUq_TtgVp1ukKX4Zromm8,17864 +pygments/lexers/factor.pyc,, +pygments/lexers/fantom.py,sha256=3OTJDka8qeNRykM1Ki1Lyek6gd-jqOa-l5IgRbX8kSg,9982 +pygments/lexers/fantom.pyc,, +pygments/lexers/felix.py,sha256=DoSGdEntZgG3JUbeBA9fqUtg3lODbqwY3_XS6EIfXt4,9408 +pygments/lexers/felix.pyc,, +pygments/lexers/floscript.py,sha256=eza4Rw3RI3mFjIIAA2czmi2SlgbcSI1T8pNr7vUd0eY,2667 +pygments/lexers/floscript.pyc,, 
+pygments/lexers/forth.py,sha256=Yqm9z-PjymjQjaleCW-SNJdCCc_NWeFXMz_XvjtAStI,7179 +pygments/lexers/forth.pyc,, +pygments/lexers/fortran.py,sha256=XqwbZg25atjNDN8yUnqkxm1nfqbzSgZDqmKUIFNQSHk,9841 +pygments/lexers/fortran.pyc,, +pygments/lexers/foxpro.py,sha256=i1B6wX4U5oY8FJO5BGtTR0RaVWbO6P45PXxndi5HcpE,26236 +pygments/lexers/foxpro.pyc,, +pygments/lexers/freefem.py,sha256=bYEPIZ1mysE2Ub9WO8NPHefz-CaGqPiE0WbHZeMHPsQ,27086 +pygments/lexers/freefem.pyc,, +pygments/lexers/functional.py,sha256=gJqzgp1ujTa8Zk5hjzXdutz8vvSJpRxhqTVCkK03Ij0,698 +pygments/lexers/functional.pyc,, +pygments/lexers/go.py,sha256=aRdc0lsKbF7xxTcUnu35m-_e3SD7s2eBAllq1y7_qY8,3701 +pygments/lexers/go.pyc,, +pygments/lexers/grammar_notation.py,sha256=j_289Tqa4umdEu8F5JzjvWPNiGcs-nkOB0TVWNyMo0E,6329 +pygments/lexers/grammar_notation.pyc,, +pygments/lexers/graph.py,sha256=v013Gzn_RIuLrEz_DJuUah_vCpv6aVSMZpHGov19BMY,2756 +pygments/lexers/graph.pyc,, +pygments/lexers/graphics.py,sha256=xfr7jZ_JF81kh-RFxIFSKOa06W4z0YxWzOxXAmrLwMA,38259 +pygments/lexers/graphics.pyc,, +pygments/lexers/haskell.py,sha256=2DwahPHxii04ZtduqmZYRCR5y8VRsPFtI6y7rMeUr0I,32072 +pygments/lexers/haskell.pyc,, +pygments/lexers/haxe.py,sha256=uWeORmR1BBCtA_HKRJIhzl26GfkzxzVd7c8or-REw7s,30959 +pygments/lexers/haxe.pyc,, +pygments/lexers/hdl.py,sha256=Xpf_1SJ-Uwf94J6MK_C5wR7JyXQkDKtlNdJ7MLL6uzs,18179 +pygments/lexers/hdl.pyc,, +pygments/lexers/hexdump.py,sha256=7y6XhpOGaVfbtWPSzFxgen8u4sr9sWCbnRUTmvnW1KI,3507 +pygments/lexers/hexdump.pyc,, +pygments/lexers/html.py,sha256=B-XSH62dR1GZSJ6E3rDOoF6WO-FcKAnrCqTYvvm8jow,19280 +pygments/lexers/html.pyc,, +pygments/lexers/idl.py,sha256=hg7CnizaVt7br6ydWkt4VU9UMNax7gg4ToA3_rnqM1M,14986 +pygments/lexers/idl.pyc,, +pygments/lexers/igor.py,sha256=FP_3Uz06p1emRB1BqpJ_11KY5k38D5nBLP9nFLnXsHA,30917 +pygments/lexers/igor.pyc,, +pygments/lexers/inferno.py,sha256=iB07whrTd_qnsABOUalv999QhFYB2nhIHfTp_ECsTxM,3117 +pygments/lexers/inferno.pyc,, +pygments/lexers/installers.py,sha256=QVPOqFLmDydPhBJYmQcyjq6XQvcPb1Hxhpbv5JvgL-M,12866 +pygments/lexers/installers.pyc,, +pygments/lexers/int_fiction.py,sha256=-jBktm0onIUz_hzsP0lUd3g9aLXJ4KLls0gjIwSB46o,55779 +pygments/lexers/int_fiction.pyc,, +pygments/lexers/iolang.py,sha256=Sv9qzhNgvVz1xmStZOLm3KTvlcI2A1zywAWQTo6ahs0,1905 +pygments/lexers/iolang.pyc,, +pygments/lexers/j.py,sha256=2wqBgvkxF99yBTdyslEsaeweZuqNO_yNZPjTKRwNTdo,4527 +pygments/lexers/j.pyc,, +pygments/lexers/javascript.py,sha256=7FV6hoTtWwOXIZGGb_9NBq5RrkDlPi9qpOzhHMioW3Q,60240 +pygments/lexers/javascript.pyc,, +pygments/lexers/julia.py,sha256=ObRU-RjNe_N6zcQZgq5nws526X_j_4c4KPUFwwROFns,14179 +pygments/lexers/julia.pyc,, +pygments/lexers/jvm.py,sha256=Qsg2PugXHCD55g_w4GVI4FDFCfOBICYW70xKhWMfNiQ,70347 +pygments/lexers/jvm.pyc,, +pygments/lexers/lisp.py,sha256=oUWEXl8czd_ovmKgkROzATeDjy01jPXAne18zXtEYRY,143609 +pygments/lexers/lisp.pyc,, +pygments/lexers/make.py,sha256=dbnhkZWxESvkvV69TrQEZYdo4yiUGoBBIE-VpXX1uBM,7326 +pygments/lexers/make.pyc,, +pygments/lexers/markup.py,sha256=6ACdRUnjI6CGRwes8szHfUjZU-nR7C42y2dbP5EdJeI,20704 +pygments/lexers/markup.pyc,, +pygments/lexers/math.py,sha256=74YS-Z0zpBP6JYk1fsauYbW7XeZ-XPDTqKakbkX0v1Y,700 +pygments/lexers/math.pyc,, +pygments/lexers/matlab.py,sha256=0BTRjVCE7WZsmdbt92Ln2WIlLMtsr-tbBbedt2Y5WHE,30344 +pygments/lexers/matlab.pyc,, +pygments/lexers/mime.py,sha256=hf-dShZ8AUSIzTELUEnlel7gnZLZpiOd-OFehEDSba0,7975 +pygments/lexers/mime.pyc,, +pygments/lexers/ml.py,sha256=TfNYvefb7-eBlI6yimjd8uT3lTiN_4gERhpmRe6zmBA,27877 +pygments/lexers/ml.pyc,, 
+pygments/lexers/modeling.py,sha256=n4gorBPf3gttlsITHGYeOnrUjUWz3nCh5oLYkDMOnrM,13409 +pygments/lexers/modeling.pyc,, +pygments/lexers/modula2.py,sha256=zenAwJk17hVa1FnOTZHJAwLrDrmcurxu4yw7pUoa_Qk,52561 +pygments/lexers/modula2.pyc,, +pygments/lexers/monte.py,sha256=tIn0lsLdG0iHRX_01KI9OkR4iazyiV5F8H3OlkKdFZQ,6307 +pygments/lexers/monte.pyc,, +pygments/lexers/ncl.py,sha256=0U8xDdO0guIlnQKCHKmKQPXv91Jqy1YvrkNoMonaYp4,63986 +pygments/lexers/ncl.pyc,, +pygments/lexers/nimrod.py,sha256=ERUF4NVMUlbirF_FvN8EIXXFRv6RJqchq4rr9vugHPI,5174 +pygments/lexers/nimrod.pyc,, +pygments/lexers/nit.py,sha256=FSQCdLNjKUrw_pisiCH-m15EQcz30lv6wvvbTgkrB-Y,2743 +pygments/lexers/nit.pyc,, +pygments/lexers/nix.py,sha256=RTgXFxL2niA9iG1zLHRWdNZy70he_vE1D0-FcoU1cfw,4031 +pygments/lexers/nix.pyc,, +pygments/lexers/oberon.py,sha256=HMOnehgSbLaTV6l1e5b44aZttyE2YIfA2hzyj6MW5xU,3733 +pygments/lexers/oberon.pyc,, +pygments/lexers/objective.py,sha256=FA7gniip1eEDC9x1UIvdI8flRtFxehTHId0MlqB0llo,22789 +pygments/lexers/objective.pyc,, +pygments/lexers/ooc.py,sha256=lP6KSoWFrq9Q7w5F_aRSaLYUryh4nuBcPfnUkwyBQsU,2999 +pygments/lexers/ooc.pyc,, +pygments/lexers/other.py,sha256=0xuOYQ0uI9eLONFTNBv2e-hltZhQcN531NVi7e2AcQQ,1768 +pygments/lexers/other.pyc,, +pygments/lexers/parasail.py,sha256=YEgpP3B62qHYOBFcoChOfgzATczrSPj1WyovIgqW3gg,2737 +pygments/lexers/parasail.pyc,, +pygments/lexers/parsers.py,sha256=fhTyqwzifEpFFfW8emQ9WYYBwlUs48Sv_qykCUQoWHE,27590 +pygments/lexers/parsers.pyc,, +pygments/lexers/pascal.py,sha256=MYqLrHPEpuJLp1uNN19MaNiW-L-FboQapFSTvSkHS5I,32621 +pygments/lexers/pascal.pyc,, +pygments/lexers/pawn.py,sha256=LN0m73AC00wHyvBlbTPU1k2ihBdmDkfIFq24uAWvsF0,8021 +pygments/lexers/pawn.pyc,, +pygments/lexers/perl.py,sha256=1i-mPqF6GxGcX28bW4VlJBpvpZn8FQqWRDjYHQj1-xo,32012 +pygments/lexers/perl.pyc,, +pygments/lexers/php.py,sha256=OMO2MWHUIyc_zNDUQDajj1SJ1KDI4PBdtIwetBQgRe8,10821 +pygments/lexers/php.pyc,, +pygments/lexers/pony.py,sha256=h6S-MGKN7q7sk869oWjC1OcgV7zwXloYnGFshhTFxHk,3269 +pygments/lexers/pony.pyc,, +pygments/lexers/praat.py,sha256=aFOD7K8wEVjcr4Jb3DAGn5AmjhMDSHY8pVC4WQfjGlc,12292 +pygments/lexers/praat.pyc,, +pygments/lexers/prolog.py,sha256=TNj3F1ossufZ_XKVVrWJlRtPDRU1ExGO6NS0-TBq7gw,12405 +pygments/lexers/prolog.pyc,, +pygments/lexers/python.py,sha256=iMEg6aTP-NOg_UCDzEGYfTNGin5YTVvOE5AtAfruKdI,47399 +pygments/lexers/python.pyc,, +pygments/lexers/qvt.py,sha256=_lXPT5SdDEqhCmuq4TcO9JRrP703kIT3a1Y_ZW9NTCY,6097 +pygments/lexers/qvt.pyc,, +pygments/lexers/r.py,sha256=7oJ0Ihq6qdhgdzpKayeC-MciEKm6PWW3UWJIYnIK_iA,6279 +pygments/lexers/r.pyc,, +pygments/lexers/rdf.py,sha256=RAerwJHNjrtXXtua4UXRfUQkMQ36uqfQZlSj63yoQA8,14608 +pygments/lexers/rdf.pyc,, +pygments/lexers/rebol.py,sha256=3bhOFMMneP38O9aJFjPZlNTS6cwbcnDlJaDbfvF4x1g,18624 +pygments/lexers/rebol.pyc,, +pygments/lexers/resource.py,sha256=xbAErtO3-d4LQJJPnLfhD7Kxz_NVQp4WiYrFu52UX-o,2926 +pygments/lexers/resource.pyc,, +pygments/lexers/rnc.py,sha256=OxpGllFDAM6Vn_alGiaEKMzQDoqRCrl82ocOO4s6L_k,1990 +pygments/lexers/rnc.pyc,, +pygments/lexers/roboconf.py,sha256=9eZkX5xkajimTV1F5wr0Y8QHPfuEB659Lde8H5AzFfM,2070 +pygments/lexers/roboconf.pyc,, +pygments/lexers/robotframework.py,sha256=KvbCkzDdowczT0UzGW0y-k7_FRKab21j6G_DOR3E0_0,18736 +pygments/lexers/robotframework.pyc,, +pygments/lexers/ruby.py,sha256=rqBelW7OJZIP-J3MVPgQzhXTh3Ey41MjMmpbGQDv390,22168 +pygments/lexers/ruby.pyc,, +pygments/lexers/rust.py,sha256=hRhrSZiiLNlmQrWT2jEQSX6vCPgPe4AkMAW8GBx_FeQ,7738 +pygments/lexers/rust.pyc,, +pygments/lexers/sas.py,sha256=guELd_4GLI1fhZr3Sxtn80Gt6s6ViYFf4jWnK23zzDc,9449 
+pygments/lexers/sas.pyc,, +pygments/lexers/scdoc.py,sha256=raoQeCR0E6sjvT56Lar0Wxc_1u6fB-gFjptjT0jE56g,1983 +pygments/lexers/scdoc.pyc,, +pygments/lexers/scripting.py,sha256=lc9oDOpkwKA9nUcHJ3x2XXs94I-UsaiEi4p9U_C58M8,67764 +pygments/lexers/scripting.pyc,, +pygments/lexers/sgf.py,sha256=R5Zqd5oVOyUd-NewEXMmACaEO5RX_F7eYUZaJXGTY4g,2024 +pygments/lexers/sgf.pyc,, +pygments/lexers/shell.py,sha256=00dGjndFJ6ZWZzsfKW3nKjIKG-CBwTHH-VYQQs57700,33870 +pygments/lexers/shell.pyc,, +pygments/lexers/slash.py,sha256=WN2f0VirklPe6djATJtbNMkFGRiuIykKZjqG19Rlgk8,8522 +pygments/lexers/slash.pyc,, +pygments/lexers/smalltalk.py,sha256=xwRETRB2O_cKHZU9w18QXZpiz87WOw0lULDhMxc9xnA,7215 +pygments/lexers/smalltalk.pyc,, +pygments/lexers/smv.py,sha256=mWuqz0uYpiQLOU4INlxjfrfslOC1yp1BQM8Ule3gqWw,2802 +pygments/lexers/smv.pyc,, +pygments/lexers/snobol.py,sha256=YFOOuPk4yBxg6stlIm6R3UiUgzkMjz06ac7dW3LRxNk,2756 +pygments/lexers/snobol.pyc,, +pygments/lexers/solidity.py,sha256=fW_aQc_HyRawyStUxllYhUn-NYJPCqzDH-ABWTeKcOI,3255 +pygments/lexers/solidity.pyc,, +pygments/lexers/special.py,sha256=hsAaqgRPHCANQNydjQfEhvE8mmlzxf-N7UMwHI8VC14,3152 +pygments/lexers/special.pyc,, +pygments/lexers/sql.py,sha256=50ufavADPsy9MMEJt3e9KpNRcIE6756lI0GI9-I5CEM,31782 +pygments/lexers/sql.pyc,, +pygments/lexers/stata.py,sha256=9IjM-8_OSSY7_7Fdq1QlNEqeo1KZx5G7_N9NL46jnF4,6457 +pygments/lexers/stata.pyc,, +pygments/lexers/supercollider.py,sha256=llVW-HUi7m4MNGy4wEp8bF2BJGTXdwF0oNfJfJ_sI8M,3516 +pygments/lexers/supercollider.pyc,, +pygments/lexers/tcl.py,sha256=ORf0CBXHwC2MFBpZpcK2sPBCCTyJ3rcwcYOIhN9s0AI,5398 +pygments/lexers/tcl.pyc,, +pygments/lexers/templates.py,sha256=3L_BF-4A1tPZYJoEI3mBtQc4qkL9IM2GdFWl4A88vrs,73561 +pygments/lexers/templates.pyc,, +pygments/lexers/teraterm.py,sha256=2DdFVGyKIF85efcB5QdqqQQNGjqRHoWzVc5psdhSD7c,6310 +pygments/lexers/teraterm.pyc,, +pygments/lexers/testing.py,sha256=Ci9pU2zhD_unhXRoiMlaCzjga2XY1ecvIrp04-wvdfA,10752 +pygments/lexers/testing.pyc,, +pygments/lexers/text.py,sha256=7cwhjV2GwLRH0CPjlOb7PLVa6XEiRQhDNFU1VO3KNjE,1030 +pygments/lexers/text.pyc,, +pygments/lexers/textedit.py,sha256=7F9f0-pAsorZpaFalHOZz5124fsdHCLTAWX_YuwA9XE,6092 +pygments/lexers/textedit.pyc,, +pygments/lexers/textfmts.py,sha256=_H9Ug-E7GKEXHl8TEyJ-bDyBROuAi9BhzF5AEtfgdQ4,13778 +pygments/lexers/textfmts.pyc,, +pygments/lexers/theorem.py,sha256=83_RPTNc88LYm96E9SzNu_zv9--KsUaB3RafbYhIZnw,18902 +pygments/lexers/theorem.pyc,, +pygments/lexers/trafficscript.py,sha256=BYTyTAlD4oDVZ9D1aRrmy4zIC4VJ_n2Lgkgq92DxeJM,1546 +pygments/lexers/trafficscript.pyc,, +pygments/lexers/typoscript.py,sha256=Leb81-51KKuK9FHoo1xKWJGPqTIsyVoeZkGcsK5tQzU,8224 +pygments/lexers/typoscript.pyc,, +pygments/lexers/unicon.py,sha256=xo0E3hnBW0gbdszL6n96Cdzume3l1DI7scgkIQ8koaw,18001 +pygments/lexers/unicon.pyc,, +pygments/lexers/urbi.py,sha256=Zq3PCTC-KI7QYuLZ7NSdikm9-MrAhrYH9DGXVSTT89I,5750 +pygments/lexers/urbi.pyc,, +pygments/lexers/varnish.py,sha256=Y2t_JY7uVz6pH3UvlpIvuaxurH4gRiQrP4Esqw5jPnk,7265 +pygments/lexers/varnish.pyc,, +pygments/lexers/verification.py,sha256=qk9xhUQDcD2jPT0k9bQlo_5eKM3UmL7RCiXceYFUF8U,3705 +pygments/lexers/verification.pyc,, +pygments/lexers/web.py,sha256=4thoq-m_kGixnDR2baWwN5eEqpFAeH3aRaOMK4J_GOE,918 +pygments/lexers/web.pyc,, +pygments/lexers/webmisc.py,sha256=ATqDw_kIhr8uc-hQTKJphKewrNXyXp2YjrRDtI9vfqc,39909 +pygments/lexers/webmisc.pyc,, +pygments/lexers/whiley.py,sha256=J9ZuO8Yv9DYl9Mb6IHyZz2zguGxZXBKxTSwDcxaii8o,4012 +pygments/lexers/whiley.pyc,, +pygments/lexers/x10.py,sha256=Lu35QT0l-objbi6mCm-rxZU_7gO1rZQhjA6JnZ-EBRI,1965 
+pygments/lexers/x10.pyc,, +pygments/lexers/xorg.py,sha256=FDN0czbxMD6YDOqwL6ltspElwMoxxNVKW11OL--keQY,887 +pygments/lexers/xorg.pyc,, +pygments/lexers/zig.py,sha256=C3kbdZ_rJUb0hMK61UiFsjzJVvC_QIPJZ6glZDNPi78,4147 +pygments/lexers/zig.pyc,, +pygments/modeline.py,sha256=ctgJHLjLF23gklYyo7Nz6P3I3Z8ArewlT5R2n2KNatQ,1010 +pygments/modeline.pyc,, +pygments/plugin.py,sha256=QFSBZcOqSJqAVQnydwDg8_LG7GzkxUgWjb0FzqoQHEM,1734 +pygments/plugin.pyc,, +pygments/regexopt.py,sha256=yMZBB3DRudP4AjPGAUpIF__o_NWOK4HrNfFV6h04V1w,3094 +pygments/regexopt.pyc,, +pygments/scanner.py,sha256=_QI5jZlo4UoyY0B572ZHqWOvVLJTh9jIq31-iZr-EPs,3123 +pygments/scanner.pyc,, +pygments/sphinxext.py,sha256=eKh9KmLPgBj1kqPr-pduvPPDt3IEyNt4eloXgVM38Tc,4657 +pygments/sphinxext.pyc,, +pygments/style.py,sha256=U2hO7Cw7EogLs6IVxj1VprqlICAEh90B3Ayf6kTAATI,5758 +pygments/style.pyc,, +pygments/styles/__init__.py,sha256=TBRYkROPEACN-kE1nQ1ygrhU4efWVShENqI6aqjk5cE,2894 +pygments/styles/__init__.pyc,, +pygments/styles/abap.py,sha256=weNa2ATjBDbWN-EJp36KuapOv_161OYudM6ilzp_5tU,751 +pygments/styles/abap.pyc,, +pygments/styles/algol.py,sha256=aVMDywxJ1VRTQ-eYd7CZVQ1BFIWehw2G9OcGg5KmfFI,2263 +pygments/styles/algol.pyc,, +pygments/styles/algol_nu.py,sha256=xgZhMlsdR8RppCyaGliUKBWVvianjxt5KrIcWCJDVMM,2278 +pygments/styles/algol_nu.pyc,, +pygments/styles/arduino.py,sha256=MtP75GT5SqaAX2PfaC116iPETAPOaD6re6cZ1d9xehQ,4492 +pygments/styles/arduino.pyc,, +pygments/styles/autumn.py,sha256=setTunOOFJAmdVHab3wmv5OkZmjP6-NVoZjMAyQ2rYY,2144 +pygments/styles/autumn.pyc,, +pygments/styles/borland.py,sha256=UOFktPmmU_TK6prVMETvVm6FhT01oqsd9_HcG1NZq_Y,1562 +pygments/styles/borland.pyc,, +pygments/styles/bw.py,sha256=t0kQytwvh_0SMBcOcmM5foPcc3JWiSd8VWBIXkoP17s,1355 +pygments/styles/bw.pyc,, +pygments/styles/colorful.py,sha256=NV-MuEX61J0HH1M0dmurc0RNinp5eA9qIHTjhZ3M6ek,2778 +pygments/styles/colorful.pyc,, +pygments/styles/default.py,sha256=j124bQ-0TFJaQ2U3ZICWq8_KUOQdjUSxFVknFcpSF40,2532 +pygments/styles/default.pyc,, +pygments/styles/emacs.py,sha256=zNGOC_fHnCZxVphHkieHr7f-zxKkSg_PrFEwWGfQw2U,2486 +pygments/styles/emacs.pyc,, +pygments/styles/friendly.py,sha256=55qszHEliWiT8h1dW5GjnEA47CpXpJ0BX0C-x6EmZsQ,2515 +pygments/styles/friendly.pyc,, +pygments/styles/fruity.py,sha256=zkSwyKzmWDs9Jtzgq3rG4DathCH6Pq2JVLuUW8auKXI,1298 +pygments/styles/fruity.pyc,, +pygments/styles/igor.py,sha256=6GFYt43btx70XZoVDSAqljc1G7UJb6_r9euz0b5nWpY,739 +pygments/styles/igor.pyc,, +pygments/styles/inkpot.py,sha256=ecGBxZQw0UhueDHZA06wvgWizu2JzXg9YkYCoLYJuh4,2347 +pygments/styles/inkpot.pyc,, +pygments/styles/lovelace.py,sha256=PBObIz9_gAjMJ8YgNrm-_z2P_wG7moQ1BosKLThJl20,3173 +pygments/styles/lovelace.pyc,, +pygments/styles/manni.py,sha256=EmN6YSp-U-ccxqLqjfnIPg-qkIhUAlSb78tIBvwFCsA,2374 +pygments/styles/manni.pyc,, +pygments/styles/monokai.py,sha256=hT5jhhqRQoOmjdK1lZ56hspKke4UDCCiUc3B8m5osLY,5086 +pygments/styles/monokai.pyc,, +pygments/styles/murphy.py,sha256=ppT--IJLWtcbxKCNRBuusP4zdSmbR8YShosCdd3hpXs,2751 +pygments/styles/murphy.pyc,, +pygments/styles/native.py,sha256=xkphXXv8PvfbgawNSTR28LcEe1TQxFtdrk_sQcGeo2E,1938 +pygments/styles/native.pyc,, +pygments/styles/paraiso_dark.py,sha256=3a4BSgZQMfB8E2bUMi1WAWkDr98oFUfaPygcsl9B9ZM,5641 +pygments/styles/paraiso_dark.pyc,, +pygments/styles/paraiso_light.py,sha256=QsZyh5oPQb6wYgnoQAkH2MRBkJjRPqAu5De77diOeN8,5645 +pygments/styles/paraiso_light.pyc,, +pygments/styles/pastie.py,sha256=duELGPs_LEzLbesA39vu0MzxtwkPJ2wnV2rS_clTu2E,2473 +pygments/styles/pastie.pyc,, 
+pygments/styles/perldoc.py,sha256=Wf54Io76npBZEsVt8HuM-x7mpzJ7iwPgj5PP_hOf91w,2175 +pygments/styles/perldoc.pyc,, +pygments/styles/rainbow_dash.py,sha256=IlLrIcl76wy4aIiZIRWxMzUILOI9ms7YEX0o6UL9ROc,2480 +pygments/styles/rainbow_dash.pyc,, +pygments/styles/rrt.py,sha256=xQp_B5sDo4BJ4Mzx4PWVK6AW_pZs_XmIoM8zLwpfVTs,852 +pygments/styles/rrt.pyc,, +pygments/styles/sas.py,sha256=jC6iVFl7-xp0MKwFkPM9QbEInzxVlnhsluPR69iqMZE,1441 +pygments/styles/sas.pyc,, +pygments/styles/solarized.py,sha256=f_E9bd-THUcJUJR36hQgbu9BVIjLi6yiI_n07oRu2u4,3747 +pygments/styles/solarized.pyc,, +pygments/styles/stata_dark.py,sha256=K1AKYh93Jd9E_eWXhDw7-tM6fJbIuFeJcAR5jVE1Nkc,1245 +pygments/styles/stata_dark.pyc,, +pygments/styles/stata_light.py,sha256=cN0ulhqteDqKkGnOqAL1aNHy3AvYbmu-fS35XaMptKM,1274 +pygments/styles/stata_light.pyc,, +pygments/styles/tango.py,sha256=1VtAeshYeFh4jWITdb5_wf-7avl1DwtGWrQkvSKqJJo,7096 +pygments/styles/tango.pyc,, +pygments/styles/trac.py,sha256=wWJokrY8EWWxJTChPxxYsH_cB-CNN7coa1ZBihzbiG4,1933 +pygments/styles/trac.pyc,, +pygments/styles/vim.py,sha256=9PtHne1K4TmKIFcPoM4NY_HRV3naKXRIeEvMC437t7U,1976 +pygments/styles/vim.pyc,, +pygments/styles/vs.py,sha256=-mK8_RJJk12gbR-TXP1zedQpflKS2zc9xQQzHbZTB1E,1073 +pygments/styles/vs.pyc,, +pygments/styles/xcode.py,sha256=s3NuWSoZ8dRCuU0PU0-aDop4xqgAXP4rVefg5yFgQVg,1501 +pygments/styles/xcode.pyc,, +pygments/token.py,sha256=J1LOX6vjhiN3pTShN9Mj0MfbWPzhypuPQYZuw29E8As,6167 +pygments/token.pyc,, +pygments/unistring.py,sha256=aQZ1Bo7UJKKhGsEIchluK5tpq7F9tjfp-AAyCvvNQKE,64749 +pygments/unistring.pyc,, +pygments/util.py,sha256=uNlo3O-lYReb1XXrPFOrGAfKLioEDDh2dbDoWGQzUkc,11900 +pygments/util.pyc,, diff --git a/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/WHEEL b/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/WHEEL new file mode 100644 index 0000000..8b701e9 --- /dev/null +++ b/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.6) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/entry_points.txt b/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/entry_points.txt new file mode 100644 index 0000000..756d801 --- /dev/null +++ b/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +pygmentize = pygments.cmdline:main + diff --git a/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/top_level.txt b/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/top_level.txt new file mode 100644 index 0000000..a9f49e0 --- /dev/null +++ b/venv/lib/python2.7/site-packages/Pygments-2.5.2.dist-info/top_level.txt @@ -0,0 +1 @@ +pygments diff --git a/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/INSTALLER b/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/LICENSE b/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/LICENSE new file mode 100644 index 0000000..2c19ef1 --- /dev/null +++ b/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/LICENSE @@ -0,0 +1,25 @@ +Copyright 2012-2016 Dmitry Shachnev +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +3. Neither the name of the University nor the names of its contributors may be + used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/METADATA b/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/METADATA new file mode 100644 index 0000000..814a2c4 --- /dev/null +++ b/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/METADATA @@ -0,0 +1,108 @@ +Metadata-Version: 2.1 +Name: SecretStorage +Version: 2.3.1 +Summary: Python bindings to FreeDesktop.org Secret Service API +Home-page: https://github.com/mitya57/secretstorage +Author: Dmitry Shachnev +Author-email: mitya57@gmail.com +License: BSD +Platform: Linux +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Topic :: Security +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires: dbus +Requires: cryptography +Requires-Dist: cryptography +Provides-Extra: dbus-python +Requires-Dist: dbus-python ; extra == 'dbus-python' + +.. image:: https://api.travis-ci.org/mitya57/secretstorage.svg + :target: https://travis-ci.org/mitya57/secretstorage + :alt: Travis CI status + +Module description +================== + +This module provides a way for securely storing passwords and other secrets. + +It uses D-Bus `Secret Service`_ API that is supported by GNOME Keyring +(since version 2.30) and KSecretsService. + +The main classes provided are ``secretstorage.Item``, representing a secret +item (that has a *label*, a *secret* and some *attributes*) and +``secretstorage.Collection``, a place items are stored in. 
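+
+A minimal sketch of those two classes (illustrative only, not part of the
+upstream METADATA; it assumes a running Secret Service daemon with an
+unlocked default collection)::
+
+    import secretstorage
+
+    bus = secretstorage.dbus_init()  # connect to the D-Bus session bus
+    collection = secretstorage.get_default_collection(bus)
+    item = collection.create_item('My first item',
+                                  {'application': 'myapp'},  # attributes
+                                  b'pa$$word')               # secret
+    print(item.get_secret())  # -> b'pa$$word'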
+ +SecretStorage supports most of the functions provided by Secret Service, +including creating and deleting items and collections, editing items, +locking and unlocking collections (asynchronous unlocking is also supported). + +The documentation can be found on `secretstorage.readthedocs.io`_. + +.. _`Secret Service`: https://specifications.freedesktop.org/secret-service/ +.. _`secretstorage.readthedocs.io`: https://secretstorage.readthedocs.io/en/latest/ + +Building the module +=================== + +.. note:: + SecretStorage supports Python 2.7 and all versions of Python since 3.3. + Here we assume that your Python version is 3.x. + +SecretStorage requires these packages to work: + +* `dbus-python`_ +* `python-cryptography`_ + +To build SecretStorage, use this command:: + + python3 setup.py build + +If you have Sphinx_ installed, you can also build the documentation:: + + python3 setup.py build_sphinx + +.. _`dbus-python`: https://www.freedesktop.org/wiki/Software/DBusBindings/#dbus-python +.. _`python-cryptography`: https://pypi.python.org/pypi/cryptography +.. _Sphinx: http://sphinx-doc.org/ + +Testing the module +================== + +First, make sure that you have the Secret Service daemon installed. +The `GNOME Keyring`_ is the reference server-side implementation for the +Secret Service specification. + +.. _`GNOME Keyring`: https://download.gnome.org/sources/gnome-keyring/ + +Then, start the daemon and unlock the ``default`` collection, if needed. +The testsuite will fail to run if the ``default`` collection exists and is +locked. If it does not exist, the testsuite can also use the temporary +``session`` collection, as provided by the GNOME Keyring. + +Then, run the Python unittest module:: + + python3 -m unittest discover -s tests + +If you want to run the tests in an isolated or headless environment, run +this command in a D-Bus session:: + + dbus-run-session -- python3 -m unittest discover -s tests + +Get the code +============ + +SecretStorage is available under BSD license. The source code can be found +on GitHub_. + +.. 
_GitHub: https://github.com/mitya57/secretstorage + + diff --git a/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/RECORD b/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/RECORD new file mode 100644 index 0000000..914f98f --- /dev/null +++ b/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/RECORD @@ -0,0 +1,21 @@ +SecretStorage-2.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +SecretStorage-2.3.1.dist-info/LICENSE,sha256=Heu5_11nWf0jzbQOO6NLH6aN0LGWcrbRVFmfsKtwTFc,1504 +SecretStorage-2.3.1.dist-info/METADATA,sha256=CAYel07z1Az4REJDy81BB30toPuuruH58GNCQlQLijM,3640 +SecretStorage-2.3.1.dist-info/RECORD,, +SecretStorage-2.3.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +SecretStorage-2.3.1.dist-info/WHEEL,sha256=bK8TJl-oUKFDa18qkB68zwTZhIBCifqi4qjS_NS4aFQ,92 +SecretStorage-2.3.1.dist-info/top_level.txt,sha256=hveSi1OWGaEt3kEVbjmZ0M-ASPxi6y-nTPVa-d3c0B4,14 +secretstorage/__init__.py,sha256=uEmUndNB4FI6C_XYvPG0JqR0j1cjIpccIUNwOSj53mk,1974 +secretstorage/__init__.pyc,, +secretstorage/collection.py,sha256=eXP3wiZqXGD8UNTSM90PZBZZTj0-5hpEMQ6mfvbPh7s,7820 +secretstorage/collection.pyc,, +secretstorage/defines.py,sha256=d-pY31y2eAxjRJTMutWq1RI6IkgaPI1MgZAnyGg6dLg,792 +secretstorage/defines.pyc,, +secretstorage/dhcrypto.py,sha256=IaxVUfzLRwYmhWVVntp7qtiS29zAxENjuugGeuGaBKA,2426 +secretstorage/dhcrypto.pyc,, +secretstorage/exceptions.py,sha256=6W1HNcYX3FQNBzlzIDHicHYfiGbQnRzjUWOLxblE6HU,1540 +secretstorage/exceptions.pyc,, +secretstorage/item.py,sha256=r6NON9yd54LrmDHx6wvRRfUMYQQgOvJGg-liiBuVCjY,5118 +secretstorage/item.pyc,, +secretstorage/util.py,sha256=m-_6HOx48I8vwrdsVVC8aRuZVp_mW80NIv38JGnjBOU,6259 +secretstorage/util.pyc,, diff --git a/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/REQUESTED b/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/WHEEL b/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/WHEEL new file mode 100644 index 0000000..60b427d --- /dev/null +++ b/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py2-none-any + diff --git a/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/top_level.txt b/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/top_level.txt new file mode 100644 index 0000000..0ec6ae8 --- /dev/null +++ b/venv/lib/python2.7/site-packages/SecretStorage-2.3.1.dist-info/top_level.txt @@ -0,0 +1 @@ +secretstorage diff --git a/venv/lib/python2.7/site-packages/_cffi_backend.so b/venv/lib/python2.7/site-packages/_cffi_backend.so new file mode 100755 index 0000000..0309729 Binary files /dev/null and b/venv/lib/python2.7/site-packages/_cffi_backend.so differ diff --git a/venv/lib/python2.7/site-packages/_dummy_thread/__init__.py b/venv/lib/python2.7/site-packages/_dummy_thread/__init__.py new file mode 100644 index 0000000..63dced6 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_dummy_thread/__init__.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import +import sys +__future_module__ = True + +if sys.version_info[0] < 3: + from dummy_thread import * +else: + raise ImportError('This package should not be accessible on Python 3. 
' + 'Either you are trying to run from the python-future src folder ' + 'or your installation of python-future is corrupted.') diff --git a/venv/lib/python2.7/site-packages/_markupbase/__init__.py b/venv/lib/python2.7/site-packages/_markupbase/__init__.py new file mode 100644 index 0000000..2909065 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_markupbase/__init__.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import +import sys +__future_module__ = True + +if sys.version_info[0] < 3: + from markupbase import * +else: + raise ImportError('This package should not be accessible on Python 3. ' + 'Either you are trying to run from the python-future src folder ' + 'or your installation of python-future is corrupted.') diff --git a/venv/lib/python2.7/site-packages/_ordereddict.so b/venv/lib/python2.7/site-packages/_ordereddict.so new file mode 100755 index 0000000..9866f0d Binary files /dev/null and b/venv/lib/python2.7/site-packages/_ordereddict.so differ diff --git a/venv/lib/python2.7/site-packages/_pytest/__init__.py b/venv/lib/python2.7/site-packages/_pytest/__init__.py new file mode 100644 index 0000000..17cc20b --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/__init__.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +__all__ = ["__version__"] + +try: + from ._version import version as __version__ +except ImportError: + # broken installation, we don't even try + # unknown only works because we do poor mans version compare + __version__ = "unknown" diff --git a/venv/lib/python2.7/site-packages/_pytest/_argcomplete.py b/venv/lib/python2.7/site-packages/_pytest/_argcomplete.py new file mode 100644 index 0000000..c6cf1d8 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/_argcomplete.py @@ -0,0 +1,110 @@ +# -*- coding: utf-8 -*- +"""allow bash-completion for argparse with argcomplete if installed +needs argcomplete>=0.5.6 for python 3.2/3.3 (older versions fail +to find the magic string, so _ARGCOMPLETE env. var is never set, and +this does not need special code. + +Function try_argcomplete(parser) should be called directly before +the call to ArgumentParser.parse_args(). + +The filescompleter is what you normally would use on the positional +arguments specification, in order to get "dirname/" after "dirn" +instead of the default "dirname ": + + optparser.add_argument(Config._file_or_dir, nargs='*' + ).completer=filescompleter + +Other, application specific, completers should go in the file +doing the add_argument calls as they need to be specified as .completer +attributes as well. (If argcomplete is not installed, the function the +attribute points to will not be used). + +SPEEDUP +======= +The generic argcomplete script for bash-completion +(/etc/bash_completion.d/python-argcomplete.sh ) +uses a python program to determine startup script generated by pip. 
+You can speed up completion somewhat by changing this script to include + # PYTHON_ARGCOMPLETE_OK +so the the python-argcomplete-check-easy-install-script does not +need to be called to find the entry point of the code and see if that is +marked with PYTHON_ARGCOMPLETE_OK + +INSTALL/DEBUGGING +================= +To include this support in another application that has setup.py generated +scripts: +- add the line: + # PYTHON_ARGCOMPLETE_OK + near the top of the main python entry point +- include in the file calling parse_args(): + from _argcomplete import try_argcomplete, filescompleter + , call try_argcomplete just before parse_args(), and optionally add + filescompleter to the positional arguments' add_argument() +If things do not work right away: +- switch on argcomplete debugging with (also helpful when doing custom + completers): + export _ARC_DEBUG=1 +- run: + python-argcomplete-check-easy-install-script $(which appname) + echo $? + will echo 0 if the magic line has been found, 1 if not +- sometimes it helps to find early on errors using: + _ARGCOMPLETE=1 _ARC_DEBUG=1 appname + which should throw a KeyError: 'COMPLINE' (which is properly set by the + global argcomplete script). +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import sys +from glob import glob + + +class FastFilesCompleter(object): + "Fast file completer class" + + def __init__(self, directories=True): + self.directories = directories + + def __call__(self, prefix, **kwargs): + """only called on non option completions""" + if os.path.sep in prefix[1:]: + prefix_dir = len(os.path.dirname(prefix) + os.path.sep) + else: + prefix_dir = 0 + completion = [] + globbed = [] + if "*" not in prefix and "?" not in prefix: + # we are on unix, otherwise no bash + if not prefix or prefix[-1] == os.path.sep: + globbed.extend(glob(prefix + ".*")) + prefix += "*" + globbed.extend(glob(prefix)) + for x in sorted(globbed): + if os.path.isdir(x): + x += "/" + # append stripping the prefix (like bash, not like compgen) + completion.append(x[prefix_dir:]) + return completion + + +if os.environ.get("_ARGCOMPLETE"): + try: + import argcomplete.completers + except ImportError: + sys.exit(-1) + filescompleter = FastFilesCompleter() + + def try_argcomplete(parser): + argcomplete.autocomplete(parser, always_complete_options=False) + + +else: + + def try_argcomplete(parser): + pass + + filescompleter = None diff --git a/venv/lib/python2.7/site-packages/_pytest/_code/__init__.py b/venv/lib/python2.7/site-packages/_pytest/_code/__init__.py new file mode 100644 index 0000000..1394b2b --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/_code/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +""" python inspection/code generation API """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from .code import Code # noqa +from .code import ExceptionInfo # noqa +from .code import filter_traceback # noqa +from .code import Frame # noqa +from .code import getrawcode # noqa +from .code import Traceback # noqa +from .source import compile_ as compile # noqa +from .source import getfslineno # noqa +from .source import Source # noqa diff --git a/venv/lib/python2.7/site-packages/_pytest/_code/_py2traceback.py b/venv/lib/python2.7/site-packages/_pytest/_code/_py2traceback.py new file mode 100644 index 0000000..faacc02 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/_code/_py2traceback.py @@ -0,0 
+1,95 @@ +# -*- coding: utf-8 -*- +# copied from python-2.7.3's traceback.py +# CHANGES: +# - some_str is replaced, trying to create unicode strings +# +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +import types + +from six import text_type + + +def format_exception_only(etype, value): + """Format the exception part of a traceback. + + The arguments are the exception type and value such as given by + sys.last_type and sys.last_value. The return value is a list of + strings, each ending in a newline. + + Normally, the list contains a single string; however, for + SyntaxError exceptions, it contains several lines that (when + printed) display detailed information about where the syntax + error occurred. + + The message indicating which exception occurred is always the last + string in the list. + + """ + + # An instance should not have a meaningful value parameter, but + # sometimes does, particularly for string exceptions, such as + # >>> raise string1, string2 # deprecated + # + # Clear these out first because issubtype(string1, SyntaxError) + # would throw another exception and mask the original problem. + if ( + isinstance(etype, BaseException) + or isinstance(etype, types.InstanceType) + or etype is None + or type(etype) is str + ): + return [_format_final_exc_line(etype, value)] + + stype = etype.__name__ + + if not issubclass(etype, SyntaxError): + return [_format_final_exc_line(stype, value)] + + # It was a syntax error; show exactly where the problem was found. + lines = [] + try: + msg, (filename, lineno, offset, badline) = value.args + except Exception: + pass + else: + filename = filename or "<string>" + lines.append(' File "{}", line {}\n'.format(filename, lineno)) + if badline is not None: + if isinstance(badline, bytes): # python 2 only + badline = badline.decode("utf-8", "replace") + lines.append(" {}\n".format(badline.strip())) + if offset is not None: + caretspace = badline.rstrip("\n")[:offset].lstrip() + # non-space whitespace (likes tabs) must be kept for alignment + caretspace = ((c.isspace() and c or " ") for c in caretspace) + # only three spaces to account for offset1 == pos 0 + lines.append(" {}^\n".format("".join(caretspace))) + value = msg + + lines.append(_format_final_exc_line(stype, value)) + return lines + + +def _format_final_exc_line(etype, value): + """Return a list of a single line -- normal case for format_exception_only""" + valuestr = _some_str(value) + if value is None or not valuestr: + line = "{}\n".format(etype) + else: + line = "{}: {}\n".format(etype, valuestr) + return line + + +def _some_str(value): + try: + return text_type(value) + except Exception: + try: + return bytes(value).decode("UTF-8", "replace") + except Exception: + pass + return "<unprintable {} object>".format(type(value).__name__) diff --git a/venv/lib/python2.7/site-packages/_pytest/_code/code.py b/venv/lib/python2.7/site-packages/_pytest/_code/code.py new file mode 100644 index 0000000..175d6fd --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/_code/code.py @@ -0,0 +1,1093 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import inspect +import re +import sys +import traceback +from inspect import CO_VARARGS +from inspect import CO_VARKEYWORDS +from weakref import ref + +import attr +import pluggy +import py +from six import text_type + +import _pytest +from _pytest._io.saferepr import safeformat
+from _pytest._io.saferepr import saferepr +from _pytest.compat import _PY2 +from _pytest.compat import _PY3 +from _pytest.compat import PY35 +from _pytest.compat import safe_str + +if _PY3: + from traceback import format_exception_only +else: + from ._py2traceback import format_exception_only + + +class Code(object): + """ wrapper around Python code objects """ + + def __init__(self, rawcode): + if not hasattr(rawcode, "co_filename"): + rawcode = getrawcode(rawcode) + try: + self.filename = rawcode.co_filename + self.firstlineno = rawcode.co_firstlineno - 1 + self.name = rawcode.co_name + except AttributeError: + raise TypeError("not a code object: %r" % (rawcode,)) + self.raw = rawcode + + def __eq__(self, other): + return self.raw == other.raw + + __hash__ = None + + def __ne__(self, other): + return not self == other + + @property + def path(self): + """ return a path object pointing to source code (note that it + might not point to an actually existing file). """ + try: + p = py.path.local(self.raw.co_filename) + # maybe don't try this checking + if not p.check(): + raise OSError("py.path check failed.") + except OSError: + # XXX maybe try harder like the weird logic + # in the standard lib [linecache.updatecache] does? + p = self.raw.co_filename + + return p + + @property + def fullsource(self): + """ return a _pytest._code.Source object for the full source file of the code + """ + from _pytest._code import source + + full, _ = source.findsource(self.raw) + return full + + def source(self): + """ return a _pytest._code.Source object for the code object's source only + """ + # return source only for that part of code + import _pytest._code + + return _pytest._code.Source(self.raw) + + def getargs(self, var=False): + """ return a tuple with the argument names for the code object + + if 'var' is set True also return the names of the variable and + keyword arguments when present + """ + # handfull shortcut for getting args + raw = self.raw + argcount = raw.co_argcount + if var: + argcount += raw.co_flags & CO_VARARGS + argcount += raw.co_flags & CO_VARKEYWORDS + return raw.co_varnames[:argcount] + + +class Frame(object): + """Wrapper around a Python frame holding f_locals and f_globals + in which expressions can be evaluated.""" + + def __init__(self, frame): + self.lineno = frame.f_lineno - 1 + self.f_globals = frame.f_globals + self.f_locals = frame.f_locals + self.raw = frame + self.code = Code(frame.f_code) + + @property + def statement(self): + """ statement this frame is at """ + import _pytest._code + + if self.code.fullsource is None: + return _pytest._code.Source("") + return self.code.fullsource.getstatement(self.lineno) + + def eval(self, code, **vars): + """ evaluate 'code' in the frame + + 'vars' are optional additional local variables + + returns the result of the evaluation + """ + f_locals = self.f_locals.copy() + f_locals.update(vars) + return eval(code, self.f_globals, f_locals) + + def exec_(self, code, **vars): + """ exec 'code' in the frame + + 'vars' are optiona; additional local variables + """ + f_locals = self.f_locals.copy() + f_locals.update(vars) + exec(code, self.f_globals, f_locals) + + def repr(self, object): + """ return a 'safe' (non-recursive, one-line) string repr for 'object' + """ + return saferepr(object) + + def is_true(self, object): + return object + + def getargs(self, var=False): + """ return a list of tuples (name, value) for all arguments + + if 'var' is set True also include the variable and keyword + arguments when present + """ + retval = 
[] + for arg in self.code.getargs(var): + try: + retval.append((arg, self.f_locals[arg])) + except KeyError: + pass # this can occur when using Psyco + return retval + + +class TracebackEntry(object): + """ a single entry in a traceback """ + + _repr_style = None + exprinfo = None + + def __init__(self, rawentry, excinfo=None): + self._excinfo = excinfo + self._rawentry = rawentry + self.lineno = rawentry.tb_lineno - 1 + + def set_repr_style(self, mode): + assert mode in ("short", "long") + self._repr_style = mode + + @property + def frame(self): + import _pytest._code + + return _pytest._code.Frame(self._rawentry.tb_frame) + + @property + def relline(self): + return self.lineno - self.frame.code.firstlineno + + def __repr__(self): + return "<TracebackEntry %s:%d>" % (self.frame.code.path, self.lineno + 1) + + @property + def statement(self): + """ _pytest._code.Source object for the current statement """ + source = self.frame.code.fullsource + return source.getstatement(self.lineno) + + @property + def path(self): + """ path to the source code """ + return self.frame.code.path + + def getlocals(self): + return self.frame.f_locals + + locals = property(getlocals, None, None, "locals of underlying frame") + + def getfirstlinesource(self): + # on Jython this firstlineno can be -1 apparently + return max(self.frame.code.firstlineno, 0) + + def getsource(self, astcache=None): + """ return failing source code. """ + # we use the passed in astcache to not reparse AST trees + # within exception info printing + from _pytest._code.source import getstatementrange_ast + + source = self.frame.code.fullsource + if source is None: + return None + key = astnode = None + if astcache is not None: + key = self.frame.code.path + if key is not None: + astnode = astcache.get(key, None) + start = self.getfirstlinesource() + try: + astnode, _, end = getstatementrange_ast( + self.lineno, source, astnode=astnode + ) + except SyntaxError: + end = self.lineno + 1 + else: + if key is not None: + astcache[key] = astnode + return source[start:end] + + source = property(getsource) + + def ishidden(self): + """ return True if the current frame has a var __tracebackhide__ + resolving to True. + + If __tracebackhide__ is a callable, it gets called with the + ExceptionInfo instance and can decide whether to hide the traceback. + + mostly for internal use + """ + f = self.frame + tbh = f.f_locals.get( + "__tracebackhide__", f.f_globals.get("__tracebackhide__", False) + ) + if tbh and callable(tbh): + return tbh(None if self._excinfo is None else self._excinfo()) + return tbh + + def __str__(self): + try: + fn = str(self.path) + except py.error.Error: + fn = "???" + name = self.frame.code.name + try: + line = str(self.statement).lstrip() + except KeyboardInterrupt: + raise + except: # noqa + line = "???" + return " File %r:%d in %s\n %s\n" % (fn, self.lineno + 1, name, line) + + def name(self): + return self.frame.code.raw.co_name + + name = property(name, None, None, "co_name of underlying code") + + +class Traceback(list): + """ Traceback objects encapsulate and offer higher level + access to Traceback entries.
+ """ + + Entry = TracebackEntry + + def __init__(self, tb, excinfo=None): + """ initialize from given python traceback object and ExceptionInfo """ + self._excinfo = excinfo + if hasattr(tb, "tb_next"): + + def f(cur): + while cur is not None: + yield self.Entry(cur, excinfo=excinfo) + cur = cur.tb_next + + list.__init__(self, f(tb)) + else: + list.__init__(self, tb) + + def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None): + """ return a Traceback instance wrapping part of this Traceback + + by provding any combination of path, lineno and firstlineno, the + first frame to start the to-be-returned traceback is determined + + this allows cutting the first part of a Traceback instance e.g. + for formatting reasons (removing some uninteresting bits that deal + with handling of the exception/traceback) + """ + for x in self: + code = x.frame.code + codepath = code.path + if ( + (path is None or codepath == path) + and ( + excludepath is None + or not hasattr(codepath, "relto") + or not codepath.relto(excludepath) + ) + and (lineno is None or x.lineno == lineno) + and (firstlineno is None or x.frame.code.firstlineno == firstlineno) + ): + return Traceback(x._rawentry, self._excinfo) + return self + + def __getitem__(self, key): + val = super(Traceback, self).__getitem__(key) + if isinstance(key, type(slice(0))): + val = self.__class__(val) + return val + + def filter(self, fn=lambda x: not x.ishidden()): + """ return a Traceback instance with certain items removed + + fn is a function that gets a single argument, a TracebackEntry + instance, and should return True when the item should be added + to the Traceback, False when not + + by default this removes all the TracebackEntries which are hidden + (see ishidden() above) + """ + return Traceback(filter(fn, self), self._excinfo) + + def getcrashentry(self): + """ return last non-hidden traceback entry that lead + to the exception of a traceback. + """ + for i in range(-1, -len(self) - 1, -1): + entry = self[i] + if not entry.ishidden(): + return entry + return self[-1] + + def recursionindex(self): + """ return the index of the frame/TracebackEntry where recursion + originates if appropriate, None if no recursion occurred + """ + cache = {} + for i, entry in enumerate(self): + # id for the code.raw is needed to work around + # the strange metaprogramming in the decorator lib from pypi + # which generates code objects that have hash/value equality + # XXX needs a test + key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno + # print "checking for recursion at", key + values = cache.setdefault(key, []) + if values: + f = entry.frame + loc = f.f_locals + for otherloc in values: + if f.is_true( + f.eval( + co_equal, + __recursioncache_locals_1=loc, + __recursioncache_locals_2=otherloc, + ) + ): + return i + values.append(entry.frame.f_locals) + return None + + +co_equal = compile( + "__recursioncache_locals_1 == __recursioncache_locals_2", "?", "eval" +) + + +@attr.s(repr=False) +class ExceptionInfo(object): + """ wraps sys.exc_info() objects and offers + help for navigating the traceback. + """ + + _assert_start_repr = ( + "AssertionError(u'assert " if _PY2 else "AssertionError('assert " + ) + + _excinfo = attr.ib() + _striptext = attr.ib(default="") + _traceback = attr.ib(default=None) + + @classmethod + def from_current(cls, exprinfo=None): + """returns an ExceptionInfo matching the current traceback + + .. 
warning:: + + Experimental API + + + :param exprinfo: a text string helping to determine if we should + strip ``AssertionError`` from the output, defaults + to the exception message/``__str__()`` + """ + tup = sys.exc_info() + assert tup[0] is not None, "no current exception" + _striptext = "" + if exprinfo is None and isinstance(tup[1], AssertionError): + exprinfo = getattr(tup[1], "msg", None) + if exprinfo is None: + exprinfo = saferepr(tup[1]) + if exprinfo and exprinfo.startswith(cls._assert_start_repr): + _striptext = "AssertionError: " + + return cls(tup, _striptext) + + @classmethod + def for_later(cls): + """return an unfilled ExceptionInfo + """ + return cls(None) + + @property + def type(self): + """the exception class""" + return self._excinfo[0] + + @property + def value(self): + """the exception value""" + return self._excinfo[1] + + @property + def tb(self): + """the exception raw traceback""" + return self._excinfo[2] + + @property + def typename(self): + """the type name of the exception""" + return self.type.__name__ + + @property + def traceback(self): + """the traceback""" + if self._traceback is None: + self._traceback = Traceback(self.tb, excinfo=ref(self)) + return self._traceback + + @traceback.setter + def traceback(self, value): + self._traceback = value + + def __repr__(self): + if self._excinfo is None: + return "<ExceptionInfo for raises contextmanager>" + return "<ExceptionInfo %s tblen=%d>" % (self.typename, len(self.traceback)) + + def exconly(self, tryshort=False): + """ return the exception as a string + + when 'tryshort' resolves to True, and the exception is a + _pytest._code._AssertionError, only the actual exception part of + the exception representation is returned (so 'AssertionError: ' is + removed from the beginning) + """ + lines = format_exception_only(self.type, self.value) + text = "".join(lines) + text = text.rstrip() + if tryshort: + if text.startswith(self._striptext): + text = text[len(self._striptext) :] + return text + + def errisinstance(self, exc): + """ return True if the exception is an instance of exc """ + return isinstance(self.value, exc) + + def _getreprcrash(self): + exconly = self.exconly(tryshort=True) + entry = self.traceback.getcrashentry() + path, lineno = entry.frame.code.raw.co_filename, entry.lineno + return ReprFileLocation(path, lineno + 1, exconly) + + def getrepr( + self, + showlocals=False, + style="long", + abspath=False, + tbfilter=True, + funcargs=False, + truncate_locals=True, + chain=True, + ): + """ + Return str()able representation of this exception info. + + :param bool showlocals: + Show locals per traceback entry. + Ignored if ``style=="native"``. + + :param str style: long|short|no|native traceback style + + :param bool abspath: + If paths should be changed to absolute or left unchanged. + + :param bool tbfilter: + Hide entries that contain a local variable ``__tracebackhide__==True``. + Ignored if ``style=="native"``. + + :param bool funcargs: + Show fixtures ("funcargs" for legacy purposes) per traceback entry. + + :param bool truncate_locals: + With ``showlocals==True``, make sure locals can be safely represented as strings. + + :param bool chain: if chained exceptions in Python 3 should be shown. + + .. versionchanged:: 3.9 + + Added the ``chain`` parameter.
+ """ + if style == "native": + return ReprExceptionInfo( + ReprTracebackNative( + traceback.format_exception( + self.type, self.value, self.traceback[0]._rawentry + ) + ), + self._getreprcrash(), + ) + + fmt = FormattedExcinfo( + showlocals=showlocals, + style=style, + abspath=abspath, + tbfilter=tbfilter, + funcargs=funcargs, + truncate_locals=truncate_locals, + chain=chain, + ) + return fmt.repr_excinfo(self) + + def __str__(self): + if self._excinfo is None: + return repr(self) + entry = self.traceback[-1] + loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly()) + return str(loc) + + def __unicode__(self): + entry = self.traceback[-1] + loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly()) + return text_type(loc) + + def match(self, regexp): + """ + Check whether the regular expression 'regexp' is found in the string + representation of the exception using ``re.search``. If it matches + then True is returned (so that it is possible to write + ``assert excinfo.match()``). If it doesn't match an AssertionError is + raised. + """ + __tracebackhide__ = True + value = ( + text_type(self.value) if isinstance(regexp, text_type) else str(self.value) + ) + if not re.search(regexp, value): + raise AssertionError( + u"Pattern {!r} not found in {!r}".format(regexp, value) + ) + return True + + +@attr.s +class FormattedExcinfo(object): + """ presenting information about failing Functions and Generators. """ + + # for traceback entries + flow_marker = ">" + fail_marker = "E" + + showlocals = attr.ib(default=False) + style = attr.ib(default="long") + abspath = attr.ib(default=True) + tbfilter = attr.ib(default=True) + funcargs = attr.ib(default=False) + truncate_locals = attr.ib(default=True) + chain = attr.ib(default=True) + astcache = attr.ib(default=attr.Factory(dict), init=False, repr=False) + + def _getindent(self, source): + # figure out indent for given source + try: + s = str(source.getstatement(len(source) - 1)) + except KeyboardInterrupt: + raise + except: # noqa + try: + s = str(source[-1]) + except KeyboardInterrupt: + raise + except: # noqa + return 0 + return 4 + (len(s) - len(s.lstrip())) + + def _getentrysource(self, entry): + source = entry.getsource(self.astcache) + if source is not None: + source = source.deindent() + return source + + def repr_args(self, entry): + if self.funcargs: + args = [] + for argname, argvalue in entry.frame.getargs(var=True): + args.append((argname, saferepr(argvalue))) + return ReprFuncArgs(args) + + def get_source(self, source, line_index=-1, excinfo=None, short=False): + """ return formatted and marked up source lines. 
""" + import _pytest._code + + lines = [] + if source is None or line_index >= len(source.lines): + source = _pytest._code.Source("???") + line_index = 0 + if line_index < 0: + line_index += len(source) + space_prefix = " " + if short: + lines.append(space_prefix + source.lines[line_index].strip()) + else: + for line in source.lines[:line_index]: + lines.append(space_prefix + line) + lines.append(self.flow_marker + " " + source.lines[line_index]) + for line in source.lines[line_index + 1 :]: + lines.append(space_prefix + line) + if excinfo is not None: + indent = 4 if short else self._getindent(source) + lines.extend(self.get_exconly(excinfo, indent=indent, markall=True)) + return lines + + def get_exconly(self, excinfo, indent=4, markall=False): + lines = [] + indent = " " * indent + # get the real exception information out + exlines = excinfo.exconly(tryshort=True).split("\n") + failindent = self.fail_marker + indent[1:] + for line in exlines: + lines.append(failindent + line) + if not markall: + failindent = indent + return lines + + def repr_locals(self, locals): + if self.showlocals: + lines = [] + keys = [loc for loc in locals if loc[0] != "@"] + keys.sort() + for name in keys: + value = locals[name] + if name == "__builtins__": + lines.append("__builtins__ = ") + else: + # This formatting could all be handled by the + # _repr() function, which is only reprlib.Repr in + # disguise, so is very configurable. + if self.truncate_locals: + str_repr = saferepr(value) + else: + str_repr = safeformat(value) + # if len(str_repr) < 70 or not isinstance(value, + # (list, tuple, dict)): + lines.append("%-10s = %s" % (name, str_repr)) + # else: + # self._line("%-10s =\\" % (name,)) + # # XXX + # pprint.pprint(value, stream=self.excinfowriter) + return ReprLocals(lines) + + def repr_traceback_entry(self, entry, excinfo=None): + import _pytest._code + + source = self._getentrysource(entry) + if source is None: + source = _pytest._code.Source("???") + line_index = 0 + else: + # entry.getfirstlinesource() can be -1, should be 0 on jython + line_index = entry.lineno - max(entry.getfirstlinesource(), 0) + + lines = [] + style = entry._repr_style + if style is None: + style = self.style + if style in ("short", "long"): + short = style == "short" + reprargs = self.repr_args(entry) if not short else None + s = self.get_source(source, line_index, excinfo, short=short) + lines.extend(s) + if short: + message = "in %s" % (entry.name) + else: + message = excinfo and excinfo.typename or "" + path = self._makepath(entry.path) + filelocrepr = ReprFileLocation(path, entry.lineno + 1, message) + localsrepr = None + if not short: + localsrepr = self.repr_locals(entry.locals) + return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style) + if excinfo: + lines.extend(self.get_exconly(excinfo, indent=4)) + return ReprEntry(lines, None, None, None, style) + + def _makepath(self, path): + if not self.abspath: + try: + np = py.path.local().bestrelpath(path) + except OSError: + return path + if len(np) < len(str(path)): + path = np + return path + + def repr_traceback(self, excinfo): + traceback = excinfo.traceback + if self.tbfilter: + traceback = traceback.filter() + + if is_recursion_error(excinfo): + traceback, extraline = self._truncate_recursive_traceback(traceback) + else: + extraline = None + + last = traceback[-1] + entries = [] + for index, entry in enumerate(traceback): + einfo = (last == entry) and excinfo or None + reprentry = self.repr_traceback_entry(entry, einfo) + entries.append(reprentry) + return 
ReprTraceback(entries, extraline, style=self.style) + + def _truncate_recursive_traceback(self, traceback): + """ + Truncate the given recursive traceback trying to find the starting point + of the recursion. + + The detection is done by going through each traceback entry and finding the + point in which the locals of the frame are equal to the locals of a previous frame (see ``recursionindex()``. + + Handle the situation where the recursion process might raise an exception (for example + comparing numpy arrays using equality raises a TypeError), in which case we do our best to + warn the user of the error and show a limited traceback. + """ + try: + recursionindex = traceback.recursionindex() + except Exception as e: + max_frames = 10 + extraline = ( + "!!! Recursion error detected, but an error occurred locating the origin of recursion.\n" + " The following exception happened when comparing locals in the stack frame:\n" + " {exc_type}: {exc_msg}\n" + " Displaying first and last {max_frames} stack frames out of {total}." + ).format( + exc_type=type(e).__name__, + exc_msg=safe_str(e), + max_frames=max_frames, + total=len(traceback), + ) + traceback = traceback[:max_frames] + traceback[-max_frames:] + else: + if recursionindex is not None: + extraline = "!!! Recursion detected (same locals & position)" + traceback = traceback[: recursionindex + 1] + else: + extraline = None + + return traceback, extraline + + def repr_excinfo(self, excinfo): + if _PY2: + reprtraceback = self.repr_traceback(excinfo) + reprcrash = excinfo._getreprcrash() + + return ReprExceptionInfo(reprtraceback, reprcrash) + else: + repr_chain = [] + e = excinfo.value + descr = None + seen = set() + while e is not None and id(e) not in seen: + seen.add(id(e)) + if excinfo: + reprtraceback = self.repr_traceback(excinfo) + reprcrash = excinfo._getreprcrash() + else: + # fallback to native repr if the exception doesn't have a traceback: + # ExceptionInfo objects require a full traceback to work + reprtraceback = ReprTracebackNative( + traceback.format_exception(type(e), e, None) + ) + reprcrash = None + + repr_chain += [(reprtraceback, reprcrash, descr)] + if e.__cause__ is not None and self.chain: + e = e.__cause__ + excinfo = ( + ExceptionInfo((type(e), e, e.__traceback__)) + if e.__traceback__ + else None + ) + descr = "The above exception was the direct cause of the following exception:" + elif ( + e.__context__ is not None + and not e.__suppress_context__ + and self.chain + ): + e = e.__context__ + excinfo = ( + ExceptionInfo((type(e), e, e.__traceback__)) + if e.__traceback__ + else None + ) + descr = "During handling of the above exception, another exception occurred:" + else: + e = None + repr_chain.reverse() + return ExceptionChainRepr(repr_chain) + + +class TerminalRepr(object): + def __str__(self): + s = self.__unicode__() + if _PY2: + s = s.encode("utf-8") + return s + + def __unicode__(self): + # FYI this is called from pytest-xdist's serialization of exception + # information. 
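The ExceptionInfo/FormattedExcinfo machinery above is driven through getrepr(); a minimal sketch of exercising the styles it documents, using only names this file exports:

    from _pytest._code import ExceptionInfo

    try:
        {}["missing"]
    except KeyError:
        excinfo = ExceptionInfo.from_current()
        print(excinfo.exconly())               # "KeyError: 'missing'"
        print(excinfo.getrepr(style="short"))  # file:line plus the failing source line
        print(excinfo.getrepr(style="native")) # defers to stdlib traceback formatting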
+ io = py.io.TextIO() + tw = py.io.TerminalWriter(file=io) + self.toterminal(tw) + return io.getvalue().strip() + + def __repr__(self): + return "<%s instance at %0x>" % (self.__class__, id(self)) + + +class ExceptionRepr(TerminalRepr): + def __init__(self): + self.sections = [] + + def addsection(self, name, content, sep="-"): + self.sections.append((name, content, sep)) + + def toterminal(self, tw): + for name, content, sep in self.sections: + tw.sep(sep, name) + tw.line(content) + + +class ExceptionChainRepr(ExceptionRepr): + def __init__(self, chain): + super(ExceptionChainRepr, self).__init__() + self.chain = chain + # reprcrash and reprtraceback of the outermost (the newest) exception + # in the chain + self.reprtraceback = chain[-1][0] + self.reprcrash = chain[-1][1] + + def toterminal(self, tw): + for element in self.chain: + element[0].toterminal(tw) + if element[2] is not None: + tw.line("") + tw.line(element[2], yellow=True) + super(ExceptionChainRepr, self).toterminal(tw) + + +class ReprExceptionInfo(ExceptionRepr): + def __init__(self, reprtraceback, reprcrash): + super(ReprExceptionInfo, self).__init__() + self.reprtraceback = reprtraceback + self.reprcrash = reprcrash + + def toterminal(self, tw): + self.reprtraceback.toterminal(tw) + super(ReprExceptionInfo, self).toterminal(tw) + + +class ReprTraceback(TerminalRepr): + entrysep = "_ " + + def __init__(self, reprentries, extraline, style): + self.reprentries = reprentries + self.extraline = extraline + self.style = style + + def toterminal(self, tw): + # the entries might have different styles + for i, entry in enumerate(self.reprentries): + if entry.style == "long": + tw.line("") + entry.toterminal(tw) + if i < len(self.reprentries) - 1: + next_entry = self.reprentries[i + 1] + if ( + entry.style == "long" + or entry.style == "short" + and next_entry.style == "long" + ): + tw.sep(self.entrysep) + + if self.extraline: + tw.line(self.extraline) + + +class ReprTracebackNative(ReprTraceback): + def __init__(self, tblines): + self.style = "native" + self.reprentries = [ReprEntryNative(tblines)] + self.extraline = None + + +class ReprEntryNative(TerminalRepr): + style = "native" + + def __init__(self, tblines): + self.lines = tblines + + def toterminal(self, tw): + tw.write("".join(self.lines)) + + +class ReprEntry(TerminalRepr): + def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style): + self.lines = lines + self.reprfuncargs = reprfuncargs + self.reprlocals = reprlocals + self.reprfileloc = filelocrepr + self.style = style + + def toterminal(self, tw): + if self.style == "short": + self.reprfileloc.toterminal(tw) + for line in self.lines: + red = line.startswith("E ") + tw.line(line, bold=True, red=red) + # tw.line("") + return + if self.reprfuncargs: + self.reprfuncargs.toterminal(tw) + for line in self.lines: + red = line.startswith("E ") + tw.line(line, bold=True, red=red) + if self.reprlocals: + tw.line("") + self.reprlocals.toterminal(tw) + if self.reprfileloc: + if self.lines: + tw.line("") + self.reprfileloc.toterminal(tw) + + def __str__(self): + return "%s\n%s\n%s" % ("\n".join(self.lines), self.reprlocals, self.reprfileloc) + + +class ReprFileLocation(TerminalRepr): + def __init__(self, path, lineno, message): + self.path = str(path) + self.lineno = lineno + self.message = message + + def toterminal(self, tw): + # filename and lineno output for each entry, + # using an output format that most editors unterstand + msg = self.message + i = msg.find("\n") + if i != -1: + msg = msg[:i] + 
tw.write(self.path, bold=True, red=True) + tw.line(":%s: %s" % (self.lineno, msg)) + + +class ReprLocals(TerminalRepr): + def __init__(self, lines): + self.lines = lines + + def toterminal(self, tw): + for line in self.lines: + tw.line(line) + + +class ReprFuncArgs(TerminalRepr): + def __init__(self, args): + self.args = args + + def toterminal(self, tw): + if self.args: + linesofar = "" + for name, value in self.args: + ns = "%s = %s" % (safe_str(name), safe_str(value)) + if len(ns) + len(linesofar) + 2 > tw.fullwidth: + if linesofar: + tw.line(linesofar) + linesofar = ns + else: + if linesofar: + linesofar += ", " + ns + else: + linesofar = ns + if linesofar: + tw.line(linesofar) + tw.line("") + + +def getrawcode(obj, trycall=True): + """ return code object for given function. """ + try: + return obj.__code__ + except AttributeError: + obj = getattr(obj, "im_func", obj) + obj = getattr(obj, "func_code", obj) + obj = getattr(obj, "f_code", obj) + obj = getattr(obj, "__code__", obj) + if trycall and not hasattr(obj, "co_firstlineno"): + if hasattr(obj, "__call__") and not inspect.isclass(obj): + x = getrawcode(obj.__call__, trycall=False) + if hasattr(x, "co_firstlineno"): + return x + return obj + + +if PY35: # RecursionError introduced in 3.5 + + def is_recursion_error(excinfo): + return excinfo.errisinstance(RecursionError) # noqa + + +else: + + def is_recursion_error(excinfo): + if not excinfo.errisinstance(RuntimeError): + return False + try: + return "maximum recursion depth exceeded" in str(excinfo.value) + except UnicodeError: + return False + + +# relative paths that we use to filter traceback entries from appearing to the user; +# see filter_traceback +# note: if we need to add more paths than what we have now we should probably use a list +# for better maintenance + +_PLUGGY_DIR = py.path.local(pluggy.__file__.rstrip("oc")) +# pluggy is either a package or a single module depending on the version +if _PLUGGY_DIR.basename == "__init__.py": + _PLUGGY_DIR = _PLUGGY_DIR.dirpath() +_PYTEST_DIR = py.path.local(_pytest.__file__).dirpath() +_PY_DIR = py.path.local(py.__file__).dirpath() + + +def filter_traceback(entry): + """Return True if a TracebackEntry instance should be removed from tracebacks: + * dynamically generated code (no code to show up for it); + * internal traceback from pytest or its internal libraries, py and pluggy. + """ + # entry.path might sometimes return a str object when the entry + # points to dynamically generated code + # see https://bitbucket.org/pytest-dev/py/issues/71 + raw_filename = entry.frame.code.raw.co_filename + is_generated = "<" in raw_filename and ">" in raw_filename + if is_generated: + return False + # entry.path might point to a non-existing file, in which case it will + # also return a str object. 
see #1133 + p = py.path.local(entry.path) + return ( + not p.relto(_PLUGGY_DIR) and not p.relto(_PYTEST_DIR) and not p.relto(_PY_DIR) + ) diff --git a/venv/lib/python2.7/site-packages/_pytest/_code/source.py b/venv/lib/python2.7/site-packages/_pytest/_code/source.py new file mode 100644 index 0000000..b35e97b --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/_code/source.py @@ -0,0 +1,324 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import ast +import inspect +import linecache +import sys +import textwrap +import tokenize +import warnings +from ast import PyCF_ONLY_AST as _AST_FLAG +from bisect import bisect_right + +import py +import six + + +class Source(object): + """ an immutable object holding a source code fragment, + possibly deindenting it. + """ + + _compilecounter = 0 + + def __init__(self, *parts, **kwargs): + self.lines = lines = [] + de = kwargs.get("deindent", True) + for part in parts: + if not part: + partlines = [] + elif isinstance(part, Source): + partlines = part.lines + elif isinstance(part, (tuple, list)): + partlines = [x.rstrip("\n") for x in part] + elif isinstance(part, six.string_types): + partlines = part.split("\n") + else: + partlines = getsource(part, deindent=de).lines + if de: + partlines = deindent(partlines) + lines.extend(partlines) + + def __eq__(self, other): + try: + return self.lines == other.lines + except AttributeError: + if isinstance(other, str): + return str(self) == other + return False + + __hash__ = None + + def __getitem__(self, key): + if isinstance(key, int): + return self.lines[key] + else: + if key.step not in (None, 1): + raise IndexError("cannot slice a Source with a step") + newsource = Source() + newsource.lines = self.lines[key.start : key.stop] + return newsource + + def __len__(self): + return len(self.lines) + + def strip(self): + """ return new source object with trailing + and leading blank lines removed. + """ + start, end = 0, len(self) + while start < end and not self.lines[start].strip(): + start += 1 + while end > start and not self.lines[end - 1].strip(): + end -= 1 + source = Source() + source.lines[:] = self.lines[start:end] + return source + + def putaround(self, before="", after="", indent=" " * 4): + """ return a copy of the source object with + 'before' and 'after' wrapped around it. + """ + before = Source(before) + after = Source(after) + newsource = Source() + lines = [(indent + line) for line in self.lines] + newsource.lines = before.lines + lines + after.lines + return newsource + + def indent(self, indent=" " * 4): + """ return a copy of the source object with + all lines indented by the given indent-string. + """ + newsource = Source() + newsource.lines = [(indent + line) for line in self.lines] + return newsource + + def getstatement(self, lineno): + """ return Source statement which contains the + given linenumber (counted from 0). + """ + start, end = self.getstatementrange(lineno) + return self[start:end] + + def getstatementrange(self, lineno): + """ return (start, end) tuple which spans the minimal + statement region which containing the given lineno. 
+ """ + if not (0 <= lineno < len(self)): + raise IndexError("lineno out of range") + ast, start, end = getstatementrange_ast(lineno, self) + return start, end + + def deindent(self): + """return a new source object deindented.""" + newsource = Source() + newsource.lines[:] = deindent(self.lines) + return newsource + + def isparseable(self, deindent=True): + """ return True if source is parseable, heuristically + deindenting it by default. + """ + if deindent: + source = str(self.deindent()) + else: + source = str(self) + try: + ast.parse(source) + except (SyntaxError, ValueError, TypeError): + return False + else: + return True + + def __str__(self): + return "\n".join(self.lines) + + def compile( + self, filename=None, mode="exec", flag=0, dont_inherit=0, _genframe=None + ): + """ return compiled code object. if filename is None + invent an artificial filename which displays + the source/line position of the caller frame. + """ + if not filename or py.path.local(filename).check(file=0): + if _genframe is None: + _genframe = sys._getframe(1) # the caller + fn, lineno = _genframe.f_code.co_filename, _genframe.f_lineno + base = "<%d-codegen " % self._compilecounter + self.__class__._compilecounter += 1 + if not filename: + filename = base + "%s:%d>" % (fn, lineno) + else: + filename = base + "%r %s:%d>" % (filename, fn, lineno) + source = "\n".join(self.lines) + "\n" + try: + co = compile(source, filename, mode, flag) + except SyntaxError: + ex = sys.exc_info()[1] + # re-represent syntax errors from parsing python strings + msglines = self.lines[: ex.lineno] + if ex.offset: + msglines.append(" " * ex.offset + "^") + msglines.append("(code was compiled probably from here: %s)" % filename) + newex = SyntaxError("\n".join(msglines)) + newex.offset = ex.offset + newex.lineno = ex.lineno + newex.text = ex.text + raise newex + else: + if flag & _AST_FLAG: + return co + lines = [(x + "\n") for x in self.lines] + linecache.cache[filename] = (1, None, lines, filename) + return co + + +# +# public API shortcut functions +# + + +def compile_(source, filename=None, mode="exec", flags=0, dont_inherit=0): + """ compile the given source to a raw code object, + and maintain an internal cache which allows later + retrieval of the source code for the code object + and any recursively created code objects. + """ + if isinstance(source, ast.AST): + # XXX should Source support having AST? + return compile(source, filename, mode, flags, dont_inherit) + _genframe = sys._getframe(1) # the caller + s = Source(source) + co = s.compile(filename, mode, flags, _genframe=_genframe) + return co + + +def getfslineno(obj): + """ Return source location (path, lineno) for the given object. + If the source cannot be determined return ("", -1). + + The line number is 0-based. 
+ """ + from .code import Code + + try: + code = Code(obj) + except TypeError: + try: + fn = inspect.getsourcefile(obj) or inspect.getfile(obj) + except TypeError: + return "", -1 + + fspath = fn and py.path.local(fn) or None + lineno = -1 + if fspath: + try: + _, lineno = findsource(obj) + except IOError: + pass + else: + fspath = code.path + lineno = code.firstlineno + assert isinstance(lineno, int) + return fspath, lineno + + +# +# helper functions +# + + +def findsource(obj): + try: + sourcelines, lineno = inspect.findsource(obj) + except Exception: + return None, -1 + source = Source() + source.lines = [line.rstrip() for line in sourcelines] + return source, lineno + + +def getsource(obj, **kwargs): + from .code import getrawcode + + obj = getrawcode(obj) + try: + strsrc = inspect.getsource(obj) + except IndentationError: + strsrc = '"Buggy python version consider upgrading, cannot get source"' + assert isinstance(strsrc, str) + return Source(strsrc, **kwargs) + + +def deindent(lines): + return textwrap.dedent("\n".join(lines)).splitlines() + + +def get_statement_startend2(lineno, node): + import ast + + # flatten all statements and except handlers into one lineno-list + # AST's line numbers start indexing at 1 + values = [] + for x in ast.walk(node): + if isinstance(x, (ast.stmt, ast.ExceptHandler)): + values.append(x.lineno - 1) + for name in ("finalbody", "orelse"): + val = getattr(x, name, None) + if val: + # treat the finally/orelse part as its own statement + values.append(val[0].lineno - 1 - 1) + values.sort() + insert_index = bisect_right(values, lineno) + start = values[insert_index - 1] + if insert_index >= len(values): + end = None + else: + end = values[insert_index] + return start, end + + +def getstatementrange_ast(lineno, source, assertion=False, astnode=None): + if astnode is None: + content = str(source) + # See #4260: + # don't produce duplicate warnings when compiling source to find ast + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + astnode = compile(content, "source", "exec", _AST_FLAG) + + start, end = get_statement_startend2(lineno, astnode) + # we need to correct the end: + # - ast-parsing strips comments + # - there might be empty lines + # - we might have lesser indented code blocks at the end + if end is None: + end = len(source.lines) + + if end > start + 1: + # make sure we don't span differently indented code blocks + # by using the BlockFinder helper used which inspect.getsource() uses itself + block_finder = inspect.BlockFinder() + # if we start with an indented line, put blockfinder to "started" mode + block_finder.started = source.lines[start][0].isspace() + it = ((x + "\n") for x in source.lines[start:end]) + try: + for tok in tokenize.generate_tokens(lambda: next(it)): + block_finder.tokeneater(*tok) + except (inspect.EndOfBlock, IndentationError): + end = block_finder.last + start + except Exception: + pass + + # the end might still point to a comment or empty line, correct it + while end: + line = source.lines[end - 1].lstrip() + if line.startswith("#") or not line: + end -= 1 + else: + break + return astnode, start, end diff --git a/venv/lib/python2.7/site-packages/_pytest/_io/__init__.py b/venv/lib/python2.7/site-packages/_pytest/_io/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python2.7/site-packages/_pytest/_io/saferepr.py b/venv/lib/python2.7/site-packages/_pytest/_io/saferepr.py new file mode 100644 index 0000000..9b412dc --- /dev/null +++ 
b/venv/lib/python2.7/site-packages/_pytest/_io/saferepr.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +import pprint + +from six.moves import reprlib + + +def _call_and_format_exception(call, x, *args): + try: + # Try the vanilla repr and make sure that the result is a string + return call(x, *args) + except Exception as exc: + exc_name = type(exc).__name__ + try: + exc_info = str(exc) + except Exception: + exc_info = "unknown" + return '<[%s("%s") raised in repr()] %s object at 0x%x>' % ( + exc_name, + exc_info, + x.__class__.__name__, + id(x), + ) + + +class SafeRepr(reprlib.Repr): + """subclass of repr.Repr that limits the resulting size of repr() + and includes information on exceptions raised during the call. + """ + + def repr(self, x): + return self._callhelper(reprlib.Repr.repr, self, x) + + def repr_unicode(self, x, level): + # Strictly speaking wrong on narrow builds + def repr(u): + if "'" not in u: + return u"'%s'" % u + elif '"' not in u: + return u'"%s"' % u + else: + return u"'%s'" % u.replace("'", r"\'") + + s = repr(x[: self.maxstring]) + if len(s) > self.maxstring: + i = max(0, (self.maxstring - 3) // 2) + j = max(0, self.maxstring - 3 - i) + s = repr(x[:i] + x[len(x) - j :]) + s = s[:i] + "..." + s[len(s) - j :] + return s + + def repr_instance(self, x, level): + return self._callhelper(repr, x) + + def _callhelper(self, call, x, *args): + s = _call_and_format_exception(call, x, *args) + if len(s) > self.maxsize: + i = max(0, (self.maxsize - 3) // 2) + j = max(0, self.maxsize - 3 - i) + s = s[:i] + "..." + s[len(s) - j :] + return s + + +def safeformat(obj): + """return a pretty printed string for the given object. + Failing __repr__ functions of user instances will be represented + with a short exception info. + """ + return _call_and_format_exception(pprint.pformat, obj) + + +def saferepr(obj, maxsize=240): + """return a size-limited safe repr-string for the given object. + Failing __repr__ functions of user instances will be represented + with a short exception info and 'saferepr' generally takes + care to never raise exceptions itself. This function is a wrapper + around the Repr/reprlib functionality of the standard 2.6 lib. + """ + # review exception handling + srepr = SafeRepr() + srepr.maxstring = maxsize + srepr.maxsize = maxsize + srepr.maxother = 160 + return srepr.repr(obj) diff --git a/venv/lib/python2.7/site-packages/_pytest/_version.py b/venv/lib/python2.7/site-packages/_pytest/_version.py new file mode 100644 index 0000000..1c03953 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/_version.py @@ -0,0 +1,4 @@ +# coding: utf-8 +# file generated by setuptools_scm +# don't change, don't track in version control +version = '4.6.10' diff --git a/venv/lib/python2.7/site-packages/_pytest/assertion/__init__.py b/venv/lib/python2.7/site-packages/_pytest/assertion/__init__.py new file mode 100644 index 0000000..6b6abb8 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/assertion/__init__.py @@ -0,0 +1,156 @@ +# -*- coding: utf-8 -*- +""" +support for presenting detailed information in failing assertions. 
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import sys + +import six + +from _pytest.assertion import rewrite +from _pytest.assertion import truncate +from _pytest.assertion import util + + +def pytest_addoption(parser): + group = parser.getgroup("debugconfig") + group.addoption( + "--assert", + action="store", + dest="assertmode", + choices=("rewrite", "plain"), + default="rewrite", + metavar="MODE", + help="""Control assertion debugging tools. 'plain' + performs no assertion debugging. 'rewrite' + (the default) rewrites assert statements in + test modules on import to provide assert + expression information.""", + ) + + +def register_assert_rewrite(*names): + """Register one or more module names to be rewritten on import. + + This function will make sure that this module or all modules inside + the package will get their assert statements rewritten. + Thus you should make sure to call this before the module is + actually imported, usually in your __init__.py if you are a plugin + using a package. + + :raise TypeError: if the given module names are not strings. + """ + for name in names: + if not isinstance(name, str): + msg = "expected module names as *args, got {0} instead" + raise TypeError(msg.format(repr(names))) + for hook in sys.meta_path: + if isinstance(hook, rewrite.AssertionRewritingHook): + importhook = hook + break + else: + importhook = DummyRewriteHook() + importhook.mark_rewrite(*names) + + +class DummyRewriteHook(object): + """A no-op import hook for when rewriting is disabled.""" + + def mark_rewrite(self, *names): + pass + + +class AssertionState(object): + """State for the assertion plugin.""" + + def __init__(self, config, mode): + self.mode = mode + self.trace = config.trace.root.get("assertion") + self.hook = None + + +def install_importhook(config): + """Try to install the rewrite hook, raise SystemError if it fails.""" + # Jython has an AST bug that make the assertion rewriting hook malfunction. + if sys.platform.startswith("java"): + raise SystemError("rewrite not supported") + + config._assertstate = AssertionState(config, "rewrite") + config._assertstate.hook = hook = rewrite.AssertionRewritingHook(config) + sys.meta_path.insert(0, hook) + config._assertstate.trace("installed rewrite import hook") + + def undo(): + hook = config._assertstate.hook + if hook is not None and hook in sys.meta_path: + sys.meta_path.remove(hook) + + config.add_cleanup(undo) + return hook + + +def pytest_collection(session): + # this hook is only called when test modules are collected + # so for example not in the master process of pytest-xdist + # (which does not collect test modules) + assertstate = getattr(session.config, "_assertstate", None) + if assertstate: + if assertstate.hook is not None: + assertstate.hook.set_session(session) + + +def pytest_runtest_setup(item): + """Setup the pytest_assertrepr_compare hook + + The newinterpret and rewrite modules will use util._reprcompare if + it exists to use custom reporting via the + pytest_assertrepr_compare hook. This sets up this custom + comparison for the test. + """ + + def callbinrepr(op, left, right): + """Call the pytest_assertrepr_compare hook and prepare the result + + This uses the first result from the hook and then ensures the + following: + * Overly verbose explanations are truncated unless configured otherwise + (eg. if running in verbose mode). + * Embedded newlines are escaped to help util.format_explanation() + later. 
+ * If the rewrite mode is used embedded %-characters are replaced + to protect later % formatting. + + The result can be formatted by util.format_explanation() for + pretty printing. + """ + hook_result = item.ihook.pytest_assertrepr_compare( + config=item.config, op=op, left=left, right=right + ) + for new_expl in hook_result: + if new_expl: + new_expl = truncate.truncate_if_required(new_expl, item) + new_expl = [line.replace("\n", "\\n") for line in new_expl] + res = six.text_type("\n~").join(new_expl) + if item.config.getvalue("assertmode") == "rewrite": + res = res.replace("%", "%%") + return res + + util._reprcompare = callbinrepr + + +def pytest_runtest_teardown(item): + util._reprcompare = None + + +def pytest_sessionfinish(session): + assertstate = getattr(session.config, "_assertstate", None) + if assertstate: + if assertstate.hook is not None: + assertstate.hook.set_session(None) + + +# Expose this plugin's implementation for the pytest_assertrepr_compare hook +pytest_assertrepr_compare = util.assertrepr_compare diff --git a/venv/lib/python2.7/site-packages/_pytest/assertion/rewrite.py b/venv/lib/python2.7/site-packages/_pytest/assertion/rewrite.py new file mode 100644 index 0000000..1c6161b --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/assertion/rewrite.py @@ -0,0 +1,1069 @@ +# -*- coding: utf-8 -*- +"""Rewrite assertion AST to produce nice error messages""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import ast +import errno +import imp +import itertools +import marshal +import os +import re +import string +import struct +import sys +import types + +import atomicwrites +import py +import six + +from _pytest._io.saferepr import saferepr +from _pytest.assertion import util +from _pytest.assertion.util import ( # noqa: F401 + format_explanation as _format_explanation, +) +from _pytest.compat import spec_from_file_location +from _pytest.pathlib import fnmatch_ex +from _pytest.pathlib import PurePath + +# pytest caches rewritten pycs in __pycache__. +if hasattr(imp, "get_tag"): + PYTEST_TAG = imp.get_tag() + "-PYTEST" +else: + if hasattr(sys, "pypy_version_info"): + impl = "pypy" + elif sys.platform == "java": + impl = "jython" + else: + impl = "cpython" + ver = sys.version_info + PYTEST_TAG = "%s-%s%s-PYTEST" % (impl, ver[0], ver[1]) + del ver, impl + +PYC_EXT = ".py" + (__debug__ and "c" or "o") +PYC_TAIL = "." 
+ PYTEST_TAG + PYC_EXT + +ASCII_IS_DEFAULT_ENCODING = sys.version_info[0] < 3 + +if sys.version_info >= (3, 5): + ast_Call = ast.Call +else: + + def ast_Call(a, b, c): + return ast.Call(a, b, c, None, None) + + +class AssertionRewritingHook(object): + """PEP302 Import hook which rewrites asserts.""" + + def __init__(self, config): + self.config = config + try: + self.fnpats = config.getini("python_files") + except ValueError: + self.fnpats = ["test_*.py", "*_test.py"] + self.session = None + self.modules = {} + self._rewritten_names = set() + self._must_rewrite = set() + # flag to guard against trying to rewrite a pyc file while we are already writing another pyc file, + # which might result in infinite recursion (#3506) + self._writing_pyc = False + self._basenames_to_check_rewrite = {"conftest"} + self._marked_for_rewrite_cache = {} + self._session_paths_checked = False + + def set_session(self, session): + self.session = session + self._session_paths_checked = False + + def _imp_find_module(self, name, path=None): + """Indirection so we can mock calls to find_module originated from the hook during testing""" + return imp.find_module(name, path) + + def find_module(self, name, path=None): + if self._writing_pyc: + return None + state = self.config._assertstate + if self._early_rewrite_bailout(name, state): + return None + state.trace("find_module called for: %s" % name) + names = name.rsplit(".", 1) + lastname = names[-1] + pth = None + if path is not None: + # Starting with Python 3.3, path is a _NamespacePath(), which + # causes problems if not converted to list. + path = list(path) + if len(path) == 1: + pth = path[0] + if pth is None: + try: + fd, fn, desc = self._imp_find_module(lastname, path) + except ImportError: + return None + if fd is not None: + fd.close() + tp = desc[2] + if tp == imp.PY_COMPILED: + if hasattr(imp, "source_from_cache"): + try: + fn = imp.source_from_cache(fn) + except ValueError: + # Python 3 doesn't like orphaned but still-importable + # .pyc files. + fn = fn[:-1] + else: + fn = fn[:-1] + elif tp != imp.PY_SOURCE: + # Don't know what this is. + return None + else: + fn = os.path.join(pth, name.rpartition(".")[2] + ".py") + + fn_pypath = py.path.local(fn) + if not self._should_rewrite(name, fn_pypath, state): + return None + + self._rewritten_names.add(name) + + # The requested module looks like a test file, so rewrite it. This is + # the most magical part of the process: load the source, rewrite the + # asserts, and load the rewritten source. We also cache the rewritten + # module code in a special pyc. We must be aware of the possibility of + # concurrent pytest processes rewriting and loading pycs. To avoid + # tricky race conditions, we maintain the following invariant: The + # cached pyc is always a complete, valid pyc. Operations on it must be + # atomic. POSIX's atomic rename comes in handy. + write = not sys.dont_write_bytecode + cache_dir = os.path.join(fn_pypath.dirname, "__pycache__") + if write: + try: + os.mkdir(cache_dir) + except OSError: + e = sys.exc_info()[1].errno + if e == errno.EEXIST: + # Either the __pycache__ directory already exists (the + # common case) or it's blocked by a non-dir node. In the + # latter case, we'll ignore it in _write_pyc. + pass + elif e in [errno.ENOENT, errno.ENOTDIR]: + # One of the path components was not a directory, likely + # because we're in a zip file. 
+ write = False + elif e in [errno.EACCES, errno.EROFS, errno.EPERM]: + state.trace("read only directory: %r" % fn_pypath.dirname) + write = False + else: + raise + cache_name = fn_pypath.basename[:-3] + PYC_TAIL + pyc = os.path.join(cache_dir, cache_name) + # Notice that even if we're in a read-only directory, I'm going + # to check for a cached pyc. This may not be optimal... + co = _read_pyc(fn_pypath, pyc, state.trace) + if co is None: + state.trace("rewriting %r" % (fn,)) + source_stat, co = _rewrite_test(self.config, fn_pypath) + if co is None: + # Probably a SyntaxError in the test. + return None + if write: + self._writing_pyc = True + try: + _write_pyc(state, co, source_stat, pyc) + finally: + self._writing_pyc = False + else: + state.trace("found cached rewritten pyc for %r" % (fn,)) + self.modules[name] = co, pyc + return self + + def _early_rewrite_bailout(self, name, state): + """ + This is a fast way to get out of rewriting modules. Profiling has + shown that the call to imp.find_module (inside of the find_module + from this class) is a major slowdown, so, this method tries to + filter what we're sure won't be rewritten before getting to it. + """ + if self.session is not None and not self._session_paths_checked: + self._session_paths_checked = True + for path in self.session._initialpaths: + # Make something as c:/projects/my_project/path.py -> + # ['c:', 'projects', 'my_project', 'path.py'] + parts = str(path).split(os.path.sep) + # add 'path' to basenames to be checked. + self._basenames_to_check_rewrite.add(os.path.splitext(parts[-1])[0]) + + # Note: conftest already by default in _basenames_to_check_rewrite. + parts = name.split(".") + if parts[-1] in self._basenames_to_check_rewrite: + return False + + # For matching the name it must be as if it was a filename. + path = PurePath(os.path.sep.join(parts) + ".py") + + for pat in self.fnpats: + # if the pattern contains subdirectories ("tests/**.py" for example) we can't bail out based + # on the name alone because we need to match against the full path + if os.path.dirname(pat): + return False + if fnmatch_ex(pat, path): + return False + + if self._is_marked_for_rewrite(name, state): + return False + + state.trace("early skip of rewriting module: %s" % (name,)) + return True + + def _should_rewrite(self, name, fn_pypath, state): + # always rewrite conftest files + fn = str(fn_pypath) + if fn_pypath.basename == "conftest.py": + state.trace("rewriting conftest file: %r" % (fn,)) + return True + + if self.session is not None: + if self.session.isinitpath(fn): + state.trace("matched test file (was specified on cmdline): %r" % (fn,)) + return True + + # modules not passed explicitly on the command line are only + # rewritten if they match the naming convention for test files + for pat in self.fnpats: + if fn_pypath.fnmatch(pat): + state.trace("matched test file %r" % (fn,)) + return True + + return self._is_marked_for_rewrite(name, state) + + def _is_marked_for_rewrite(self, name, state): + try: + return self._marked_for_rewrite_cache[name] + except KeyError: + for marked in self._must_rewrite: + if name == marked or name.startswith(marked + "."): + state.trace("matched marked file %r (from %r)" % (name, marked)) + self._marked_for_rewrite_cache[name] = True + return True + + self._marked_for_rewrite_cache[name] = False + return False + + def mark_rewrite(self, *names): + """Mark import names as needing to be rewritten. + + The named module or package as well as any nested modules will + be rewritten on import. 
+ """ + already_imported = ( + set(names).intersection(sys.modules).difference(self._rewritten_names) + ) + for name in already_imported: + if not AssertionRewriter.is_rewrite_disabled( + sys.modules[name].__doc__ or "" + ): + self._warn_already_imported(name) + self._must_rewrite.update(names) + self._marked_for_rewrite_cache.clear() + + def _warn_already_imported(self, name): + from _pytest.warning_types import PytestAssertRewriteWarning + from _pytest.warnings import _issue_warning_captured + + _issue_warning_captured( + PytestAssertRewriteWarning( + "Module already imported so cannot be rewritten: %s" % name + ), + self.config.hook, + stacklevel=5, + ) + + def load_module(self, name): + co, pyc = self.modules.pop(name) + if name in sys.modules: + # If there is an existing module object named 'fullname' in + # sys.modules, the loader must use that existing module. (Otherwise, + # the reload() builtin will not work correctly.) + mod = sys.modules[name] + else: + # I wish I could just call imp.load_compiled here, but __file__ has to + # be set properly. In Python 3.2+, this all would be handled correctly + # by load_compiled. + mod = sys.modules[name] = imp.new_module(name) + try: + mod.__file__ = co.co_filename + # Normally, this attribute is 3.2+. + mod.__cached__ = pyc + mod.__loader__ = self + # Normally, this attribute is 3.4+ + mod.__spec__ = spec_from_file_location(name, co.co_filename, loader=self) + exec(co, mod.__dict__) + except: # noqa + if name in sys.modules: + del sys.modules[name] + raise + return sys.modules[name] + + def is_package(self, name): + try: + fd, fn, desc = self._imp_find_module(name) + except ImportError: + return False + if fd is not None: + fd.close() + tp = desc[2] + return tp == imp.PKG_DIRECTORY + + def get_data(self, pathname): + """Optional PEP302 get_data API. + """ + with open(pathname, "rb") as f: + return f.read() + + +def _write_pyc(state, co, source_stat, pyc): + # Technically, we don't have to have the same pyc format as + # (C)Python, since these "pycs" should never be seen by builtin + # import. However, there's little reason deviate, and I hope + # sometime to be able to use imp.load_compiled to load them. (See + # the comment in load_module above.) 
+ try: + with atomicwrites.atomic_write(pyc, mode="wb", overwrite=True) as fp: + fp.write(imp.get_magic()) + # as of now, bytecode header expects 32-bit numbers for size and mtime (#4903) + mtime = int(source_stat.mtime) & 0xFFFFFFFF + size = source_stat.size & 0xFFFFFFFF + # "<LL" stands for 2 unsigned longs, little-ending + fp.write(struct.pack("<LL", mtime, size)) + fp.write(marshal.dumps(co)) +binop_map = { + ast.BitOr: "|", + ast.BitXor: "^", + ast.BitAnd: "&", + ast.LShift: "<<", + ast.RShift: ">>", + ast.Add: "+", + ast.Sub: "-", + ast.Mult: "*", + ast.Div: "/", + ast.FloorDiv: "//", + ast.Mod: "%%", # escaped for string formatting + ast.Eq: "==", + ast.NotEq: "!=", + ast.Lt: "<", + ast.LtE: "<=", + ast.Gt: ">", + ast.GtE: ">=", + ast.Pow: "**", + ast.Is: "is", + ast.IsNot: "is not", + ast.In: "in", + ast.NotIn: "not in", +} +# Python 3.5+ compatibility +try: + binop_map[ast.MatMult] = "@" +except AttributeError: + pass + +# Python 3.4+ compatibility +if hasattr(ast, "NameConstant"): + _NameConstant = ast.NameConstant +else: + + def _NameConstant(c): + return ast.Name(str(c), ast.Load()) + + +def set_location(node, lineno, col_offset): + """Set node location information recursively.""" + + def _fix(node, lineno, col_offset): + if "lineno" in node._attributes: + node.lineno = lineno + if "col_offset" in node._attributes: + node.col_offset = col_offset + for child in ast.iter_child_nodes(node): + _fix(child, lineno, col_offset) + + _fix(node, lineno, col_offset) + return node + + +class AssertionRewriter(ast.NodeVisitor): + """Assertion rewriting implementation. + + The main entrypoint is to call .run() with an ast.Module instance, + this will then find all the assert statements and rewrite them to + provide intermediate values and a detailed assertion error. See + http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html + for an overview of how this works. + + The entry point here is .run() which will iterate over all the + statements in an ast.Module and for each ast.Assert statement it + finds call .visit() with it. Then .visit_Assert() takes over and + is responsible for creating new ast statements to replace the + original assert statement: it rewrites the test of an assertion + to provide intermediate values and replace it with an if statement + which raises an assertion error with a detailed explanation in + case the expression is false. + + For this .visit_Assert() uses the visitor pattern to visit all the + AST nodes of the ast.Assert.test field, each visit call returning + an AST node and the corresponding explanation string. During this + state is kept in several instance attributes: + + :statements: All the AST statements which will replace the assert + statement. + + :variables: This is populated by .variable() with each variable + used by the statements so that they can all be set to None at + the end of the statements. + + :variable_counter: Counter to create new unique variables needed + by statements. Variables are created using .variable() and + have the form of "@py_assert0". + + :on_failure: The AST statements which will be executed if the + assertion test fails. This is the code which will construct + the failure message and raises the AssertionError. + + :explanation_specifiers: A dict filled by .explanation_param() + with %-formatting placeholders and their corresponding + expressions to use in the building of an assertion message. + This is used by .pop_format_context() to build a message. + + :stack: A stack of the explanation_specifiers dicts maintained by + .push_format_context() and .pop_format_context() which allows + to build another %-formatted string while already building one.
+ + This state is reset on every new assert statement visited and used + by the other visitors. + + """ + + def __init__(self, module_path, config): + super(AssertionRewriter, self).__init__() + self.module_path = module_path + self.config = config + + def run(self, mod): + """Find all assert statements in *mod* and rewrite them.""" + if not mod.body: + # Nothing to do. + return + # Insert some special imports at the top of the module but after any + # docstrings and __future__ imports. + aliases = [ + ast.alias(six.moves.builtins.__name__, "@py_builtins"), + ast.alias("_pytest.assertion.rewrite", "@pytest_ar"), + ] + doc = getattr(mod, "docstring", None) + expect_docstring = doc is None + if doc is not None and self.is_rewrite_disabled(doc): + return + pos = 0 + lineno = 1 + for item in mod.body: + if ( + expect_docstring + and isinstance(item, ast.Expr) + and isinstance(item.value, ast.Str) + ): + doc = item.value.s + if self.is_rewrite_disabled(doc): + return + expect_docstring = False + elif ( + not isinstance(item, ast.ImportFrom) + or item.level > 0 + or item.module != "__future__" + ): + lineno = item.lineno + break + pos += 1 + else: + lineno = item.lineno + imports = [ + ast.Import([alias], lineno=lineno, col_offset=0) for alias in aliases + ] + mod.body[pos:pos] = imports + # Collect asserts. + nodes = [mod] + while nodes: + node = nodes.pop() + for name, field in ast.iter_fields(node): + if isinstance(field, list): + new = [] + for i, child in enumerate(field): + if isinstance(child, ast.Assert): + # Transform assert. + new.extend(self.visit(child)) + else: + new.append(child) + if isinstance(child, ast.AST): + nodes.append(child) + setattr(node, name, new) + elif ( + isinstance(field, ast.AST) + # Don't recurse into expressions as they can't contain + # asserts. + and not isinstance(field, ast.expr) + ): + nodes.append(field) + + @staticmethod + def is_rewrite_disabled(docstring): + return "PYTEST_DONT_REWRITE" in docstring + + def variable(self): + """Get a new variable.""" + # Use a character invalid in python identifiers to avoid clashing. + name = "@py_assert" + str(next(self.variable_counter)) + self.variables.append(name) + return name + + def assign(self, expr): + """Give *expr* a name.""" + name = self.variable() + self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr)) + return ast.Name(name, ast.Load()) + + def display(self, expr): + """Call saferepr on the expression.""" + return self.helper("_saferepr", expr) + + def helper(self, name, *args): + """Call a helper in this module.""" + py_name = ast.Name("@pytest_ar", ast.Load()) + attr = ast.Attribute(py_name, name, ast.Load()) + return ast_Call(attr, list(args), []) + + def builtin(self, name): + """Return the builtin called *name*.""" + builtin_name = ast.Name("@py_builtins", ast.Load()) + return ast.Attribute(builtin_name, name, ast.Load()) + + def explanation_param(self, expr): + """Return a new named %-formatting placeholder for expr. + + This creates a %-formatting placeholder for expr in the + current formatting context, e.g. ``%(py0)s``. The placeholder + and expr are placed in the current format context so that it + can be used on the next call to .pop_format_context(). + + """ + specifier = "py" + str(next(self.variable_counter)) + self.explanation_specifiers[specifier] = expr + return "%(" + specifier + ")s" + + def push_format_context(self): + """Create a new formatting context. 
+ + The format context is used for when an explanation wants to + have a variable value formatted in the assertion message. In + this case the value required can be added using + .explanation_param(). Finally .pop_format_context() is used + to format a string of %-formatted values as added by + .explanation_param(). + + """ + self.explanation_specifiers = {} + self.stack.append(self.explanation_specifiers) + + def pop_format_context(self, expl_expr): + """Format the %-formatted string with current format context. + + The expl_expr should be an ast.Str instance constructed from + the %-placeholders created by .explanation_param(). This will + add the required code to format said string to .on_failure and + return the ast.Name instance of the formatted string. + + """ + current = self.stack.pop() + if self.stack: + self.explanation_specifiers = self.stack[-1] + keys = [ast.Str(key) for key in current.keys()] + format_dict = ast.Dict(keys, list(current.values())) + form = ast.BinOp(expl_expr, ast.Mod(), format_dict) + name = "@py_format" + str(next(self.variable_counter)) + self.on_failure.append(ast.Assign([ast.Name(name, ast.Store())], form)) + return ast.Name(name, ast.Load()) + + def generic_visit(self, node): + """Handle expressions we don't have custom code for.""" + assert isinstance(node, ast.expr) + res = self.assign(node) + return res, self.explanation_param(self.display(res)) + + def visit_Assert(self, assert_): + """Return the AST statements to replace the ast.Assert instance. + + This rewrites the test of an assertion to provide + intermediate values and replace it with an if statement which + raises an assertion error with a detailed explanation in case + the expression is false. + + """ + if isinstance(assert_.test, ast.Tuple) and len(assert_.test.elts) >= 1: + from _pytest.warning_types import PytestAssertRewriteWarning + import warnings + + warnings.warn_explicit( + PytestAssertRewriteWarning( + "assertion is always true, perhaps remove parentheses?" + ), + category=None, + filename=str(self.module_path), + lineno=assert_.lineno, + ) + + self.statements = [] + self.variables = [] + self.variable_counter = itertools.count() + self.stack = [] + self.on_failure = [] + self.push_format_context() + # Rewrite assert into a bunch of statements. + top_condition, explanation = self.visit(assert_.test) + # If in a test module, check if directly asserting None, in order to warn [Issue #3191] + if self.module_path is not None: + self.statements.append( + self.warn_about_none_ast( + top_condition, module_path=self.module_path, lineno=assert_.lineno + ) + ) + # Create failure message. + body = self.on_failure + negation = ast.UnaryOp(ast.Not(), top_condition) + self.statements.append(ast.If(negation, body, [])) + if assert_.msg: + assertmsg = self.helper("_format_assertmsg", assert_.msg) + explanation = "\n>assert " + explanation + else: + assertmsg = ast.Str("") + explanation = "assert " + explanation + template = ast.BinOp(assertmsg, ast.Add(), ast.Str(explanation)) + msg = self.pop_format_context(template) + fmt = self.helper("_format_explanation", msg) + err_name = ast.Name("AssertionError", ast.Load()) + exc = ast_Call(err_name, [fmt], []) + if sys.version_info[0] >= 3: + raise_ = ast.Raise(exc, None) + else: + raise_ = ast.Raise(exc, None, None) + body.append(raise_) + # Clear temporary variables by setting them to None. 
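
The explanation_param()/pop_format_context() pair above amounts to deferred %-formatting: placeholders are handed out eagerly, and the dict of collected values is applied only when the assertion fails. A self-contained sketch of that idea (all names here are illustrative, not pytest internals):

    import itertools

    specifiers = {}
    counter = itertools.count()

    def explanation_param(value):
        # Stash the value under a fresh %(pyN)s key and return the placeholder.
        key = "py%d" % next(counter)
        specifiers[key] = repr(value)
        return "%%(%s)s" % key

    template = explanation_param(1) + " == " + explanation_param(2)
    assert template % specifiers == "1 == 2"
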
+ if self.variables:
+ variables = [ast.Name(name, ast.Store()) for name in self.variables]
+ clear = ast.Assign(variables, _NameConstant(None))
+ self.statements.append(clear)
+ # Fix line numbers.
+ for stmt in self.statements:
+ set_location(stmt, assert_.lineno, assert_.col_offset)
+ return self.statements
+
+ def warn_about_none_ast(self, node, module_path, lineno):
+ """
+ Returns an AST issuing a warning if the value of node is `None`.
+ This is used to warn the user when asserting a function that
+ already asserts internally.
+ See issue #3191 for more details.
+ """
+
+ # Using parse because it is different between py2 and py3.
+ AST_NONE = ast.parse("None").body[0].value
+ val_is_none = ast.Compare(node, [ast.Is()], [AST_NONE])
+ send_warning = ast.parse(
+ """
+from _pytest.warning_types import PytestAssertRewriteWarning
+from warnings import warn_explicit
+warn_explicit(
+ PytestAssertRewriteWarning('asserting the value None, please use "assert is None"'),
+ category=None,
+ filename={filename!r},
+ lineno={lineno},
+)
+ """.format(
+ filename=module_path.strpath, lineno=lineno
+ )
+ ).body
+ return ast.If(val_is_none, send_warning, [])
+
+ def visit_Name(self, name):
+ # Display the repr of the name if it's a local variable or
+ # _should_repr_global_name() thinks it's acceptable.
+ locs = ast_Call(self.builtin("locals"), [], [])
+ inlocs = ast.Compare(ast.Str(name.id), [ast.In()], [locs])
+ dorepr = self.helper("_should_repr_global_name", name)
+ test = ast.BoolOp(ast.Or(), [inlocs, dorepr])
+ expr = ast.IfExp(test, self.display(name), ast.Str(name.id))
+ return name, self.explanation_param(expr)
+
+ def visit_BoolOp(self, boolop):
+ res_var = self.variable()
+ expl_list = self.assign(ast.List([], ast.Load()))
+ app = ast.Attribute(expl_list, "append", ast.Load())
+ is_or = int(isinstance(boolop.op, ast.Or))
+ body = save = self.statements
+ fail_save = self.on_failure
+ levels = len(boolop.values) - 1
+ self.push_format_context()
+ # Process each operand, short-circuiting if needed.
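
The nested ast.If statements built in the loop that follows reproduce Python's short-circuit rules; the effect is easier to see in plain Python. A sketch (explain_and is an illustrative stand-in, not pytest code):

    def explain_and(*thunks):
        # Evaluate operands left to right, keep one explanation per operand,
        # and stop at the first false value, just like `and` itself.
        expls = []
        res = True
        for thunk in thunks:
            res = thunk()
            expls.append(repr(res))
            if not res:
                break
        return res, " and ".join(expls)

    res, expl = explain_and(lambda: 1, lambda: 0, lambda: 2)
    assert (res, expl) == (0, "1 and 0")
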
+ for i, v in enumerate(boolop.values): + if i: + fail_inner = [] + # cond is set in a prior loop iteration below + self.on_failure.append(ast.If(cond, fail_inner, [])) # noqa + self.on_failure = fail_inner + self.push_format_context() + res, expl = self.visit(v) + body.append(ast.Assign([ast.Name(res_var, ast.Store())], res)) + expl_format = self.pop_format_context(ast.Str(expl)) + call = ast_Call(app, [expl_format], []) + self.on_failure.append(ast.Expr(call)) + if i < levels: + cond = res + if is_or: + cond = ast.UnaryOp(ast.Not(), cond) + inner = [] + self.statements.append(ast.If(cond, inner, [])) + self.statements = body = inner + self.statements = save + self.on_failure = fail_save + expl_template = self.helper("_format_boolop", expl_list, ast.Num(is_or)) + expl = self.pop_format_context(expl_template) + return ast.Name(res_var, ast.Load()), self.explanation_param(expl) + + def visit_UnaryOp(self, unary): + pattern = unary_map[unary.op.__class__] + operand_res, operand_expl = self.visit(unary.operand) + res = self.assign(ast.UnaryOp(unary.op, operand_res)) + return res, pattern % (operand_expl,) + + def visit_BinOp(self, binop): + symbol = binop_map[binop.op.__class__] + left_expr, left_expl = self.visit(binop.left) + right_expr, right_expl = self.visit(binop.right) + explanation = "(%s %s %s)" % (left_expl, symbol, right_expl) + res = self.assign(ast.BinOp(left_expr, binop.op, right_expr)) + return res, explanation + + def visit_Call_35(self, call): + """ + visit `ast.Call` nodes on Python3.5 and after + """ + new_func, func_expl = self.visit(call.func) + arg_expls = [] + new_args = [] + new_kwargs = [] + for arg in call.args: + res, expl = self.visit(arg) + arg_expls.append(expl) + new_args.append(res) + for keyword in call.keywords: + res, expl = self.visit(keyword.value) + new_kwargs.append(ast.keyword(keyword.arg, res)) + if keyword.arg: + arg_expls.append(keyword.arg + "=" + expl) + else: # **args have `arg` keywords with an .arg of None + arg_expls.append("**" + expl) + + expl = "%s(%s)" % (func_expl, ", ".join(arg_expls)) + new_call = ast.Call(new_func, new_args, new_kwargs) + res = self.assign(new_call) + res_expl = self.explanation_param(self.display(res)) + outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl) + return res, outer_expl + + def visit_Starred(self, starred): + # From Python 3.5, a Starred node can appear in a function call + res, expl = self.visit(starred.value) + new_starred = ast.Starred(res, starred.ctx) + return new_starred, "*" + expl + + def visit_Call_legacy(self, call): + """ + visit `ast.Call nodes on 3.4 and below` + """ + new_func, func_expl = self.visit(call.func) + arg_expls = [] + new_args = [] + new_kwargs = [] + new_star = new_kwarg = None + for arg in call.args: + res, expl = self.visit(arg) + new_args.append(res) + arg_expls.append(expl) + for keyword in call.keywords: + res, expl = self.visit(keyword.value) + new_kwargs.append(ast.keyword(keyword.arg, res)) + arg_expls.append(keyword.arg + "=" + expl) + if call.starargs: + new_star, expl = self.visit(call.starargs) + arg_expls.append("*" + expl) + if call.kwargs: + new_kwarg, expl = self.visit(call.kwargs) + arg_expls.append("**" + expl) + expl = "%s(%s)" % (func_expl, ", ".join(arg_expls)) + new_call = ast.Call(new_func, new_args, new_kwargs, new_star, new_kwarg) + res = self.assign(new_call) + res_expl = self.explanation_param(self.display(res)) + outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl) + return res, outer_expl + + # ast.Call signature changed on 3.5, + # 
conditionally change which methods is named + # visit_Call depending on Python version + if sys.version_info >= (3, 5): + visit_Call = visit_Call_35 + else: + visit_Call = visit_Call_legacy + + def visit_Attribute(self, attr): + if not isinstance(attr.ctx, ast.Load): + return self.generic_visit(attr) + value, value_expl = self.visit(attr.value) + res = self.assign(ast.Attribute(value, attr.attr, ast.Load())) + res_expl = self.explanation_param(self.display(res)) + pat = "%s\n{%s = %s.%s\n}" + expl = pat % (res_expl, res_expl, value_expl, attr.attr) + return res, expl + + def visit_Compare(self, comp): + self.push_format_context() + left_res, left_expl = self.visit(comp.left) + if isinstance(comp.left, (ast.Compare, ast.BoolOp)): + left_expl = "({})".format(left_expl) + res_variables = [self.variable() for i in range(len(comp.ops))] + load_names = [ast.Name(v, ast.Load()) for v in res_variables] + store_names = [ast.Name(v, ast.Store()) for v in res_variables] + it = zip(range(len(comp.ops)), comp.ops, comp.comparators) + expls = [] + syms = [] + results = [left_res] + for i, op, next_operand in it: + next_res, next_expl = self.visit(next_operand) + if isinstance(next_operand, (ast.Compare, ast.BoolOp)): + next_expl = "({})".format(next_expl) + results.append(next_res) + sym = binop_map[op.__class__] + syms.append(ast.Str(sym)) + expl = "%s %s %s" % (left_expl, sym, next_expl) + expls.append(ast.Str(expl)) + res_expr = ast.Compare(left_res, [op], [next_res]) + self.statements.append(ast.Assign([store_names[i]], res_expr)) + left_res, left_expl = next_res, next_expl + # Use pytest.assertion.util._reprcompare if that's available. + expl_call = self.helper( + "_call_reprcompare", + ast.Tuple(syms, ast.Load()), + ast.Tuple(load_names, ast.Load()), + ast.Tuple(expls, ast.Load()), + ast.Tuple(results, ast.Load()), + ) + if len(comp.ops) > 1: + res = ast.BoolOp(ast.And(), load_names) + else: + res = load_names[0] + return res, self.explanation_param(self.pop_format_context(expl_call)) diff --git a/venv/lib/python2.7/site-packages/_pytest/assertion/truncate.py b/venv/lib/python2.7/site-packages/_pytest/assertion/truncate.py new file mode 100644 index 0000000..525896e --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/assertion/truncate.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +""" +Utilities for truncating assertion output. + +Current default behaviour is to truncate assertion explanations at +~8 terminal lines, unless running in "-vv" mode or running on CI. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os + +import six + +DEFAULT_MAX_LINES = 8 +DEFAULT_MAX_CHARS = 8 * 80 +USAGE_MSG = "use '-vv' to show" + + +def truncate_if_required(explanation, item, max_length=None): + """ + Truncate this assertion explanation if the given test item is eligible. + """ + if _should_truncate_item(item): + return _truncate_explanation(explanation) + return explanation + + +def _should_truncate_item(item): + """ + Whether or not this test item is eligible for truncation. + """ + verbose = item.config.option.verbose + return verbose < 2 and not _running_on_ci() + + +def _running_on_ci(): + """Check if we're currently running on a CI system.""" + env_vars = ["CI", "BUILD_NUMBER"] + return any(var in os.environ for var in env_vars) + + +def _truncate_explanation(input_lines, max_lines=None, max_chars=None): + """ + Truncate given list of strings that makes up the assertion explanation. 
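
A usage sketch for the truncation helper whose docstring continues below, assuming this vendored module is importable as _pytest.assertion.truncate and using the defaults defined above (8 lines, 640 characters):

    from _pytest.assertion.truncate import _truncate_explanation

    lines = ["line %d" % i for i in range(20)]
    out = _truncate_explanation(lines, max_lines=8, max_chars=640)
    assert out[7] == "line 7..."   # part-truncated final content line
    # 12 dropped lines plus the part-truncated one are reported as hidden:
    assert out[-1] == "...Full output truncated (13 lines hidden), use '-vv' to show"
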
+ + Truncates to either 8 lines, or 640 characters - whichever the input reaches + first. The remaining lines will be replaced by a usage message. + """ + + if max_lines is None: + max_lines = DEFAULT_MAX_LINES + if max_chars is None: + max_chars = DEFAULT_MAX_CHARS + + # Check if truncation required + input_char_count = len("".join(input_lines)) + if len(input_lines) <= max_lines and input_char_count <= max_chars: + return input_lines + + # Truncate first to max_lines, and then truncate to max_chars if max_chars + # is exceeded. + truncated_explanation = input_lines[:max_lines] + truncated_explanation = _truncate_by_char_count(truncated_explanation, max_chars) + + # Add ellipsis to final line + truncated_explanation[-1] = truncated_explanation[-1] + "..." + + # Append useful message to explanation + truncated_line_count = len(input_lines) - len(truncated_explanation) + truncated_line_count += 1 # Account for the part-truncated final line + msg = "...Full output truncated" + if truncated_line_count == 1: + msg += " ({} line hidden)".format(truncated_line_count) + else: + msg += " ({} lines hidden)".format(truncated_line_count) + msg += ", {}".format(USAGE_MSG) + truncated_explanation.extend([six.text_type(""), six.text_type(msg)]) + return truncated_explanation + + +def _truncate_by_char_count(input_lines, max_chars): + # Check if truncation required + if len("".join(input_lines)) <= max_chars: + return input_lines + + # Find point at which input length exceeds total allowed length + iterated_char_count = 0 + for iterated_index, input_line in enumerate(input_lines): + if iterated_char_count + len(input_line) > max_chars: + break + iterated_char_count += len(input_line) + + # Create truncated explanation with modified final line + truncated_result = input_lines[:iterated_index] + final_line = input_lines[iterated_index] + if final_line: + final_line_truncate_point = max_chars - iterated_char_count + final_line = final_line[:final_line_truncate_point] + truncated_result.append(final_line) + return truncated_result diff --git a/venv/lib/python2.7/site-packages/_pytest/assertion/util.py b/venv/lib/python2.7/site-packages/_pytest/assertion/util.py new file mode 100644 index 0000000..c382f1c --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/assertion/util.py @@ -0,0 +1,421 @@ +# -*- coding: utf-8 -*- +"""Utilities for assertion debugging""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import pprint + +import six + +import _pytest._code +from ..compat import Sequence +from _pytest import outcomes +from _pytest._io.saferepr import saferepr +from _pytest.compat import ATTRS_EQ_FIELD + +# The _reprcompare attribute on the util module is used by the new assertion +# interpretation code and assertion rewriter to detect this plugin was +# loaded and in turn call the hooks defined here as part of the +# DebugInterpreter. +_reprcompare = None + + +# the re-encoding is needed for python2 repr +# with non-ascii characters (see issue 877 and 1379) +def ecu(s): + if isinstance(s, bytes): + return s.decode("UTF-8", "replace") + else: + return s + + +def format_explanation(explanation): + """This formats an explanation + + Normally all embedded newlines are escaped, however there are + three exceptions: \n{, \n} and \n~. The first two are intended + cover nested explanations, see function and attribute explanations + for examples (.visit_Call(), visit_Attribute()). 
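
For instance, feeding a nested explanation through format_explanation() (assuming this vendored module is importable as _pytest.assertion.util) renders a "\n{ ... \n}" pair as a "where" clause:

    from _pytest.assertion.util import format_explanation

    expl = "assert res\n{res = f(1)\n}"
    print(format_explanation(expl))
    # assert res
    #  + where res = f(1)
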
The last one is + for when one explanation needs to span multiple lines, e.g. when + displaying diffs. + """ + explanation = ecu(explanation) + lines = _split_explanation(explanation) + result = _format_lines(lines) + return u"\n".join(result) + + +def _split_explanation(explanation): + """Return a list of individual lines in the explanation + + This will return a list of lines split on '\n{', '\n}' and '\n~'. + Any other newlines will be escaped and appear in the line as the + literal '\n' characters. + """ + raw_lines = (explanation or u"").split("\n") + lines = [raw_lines[0]] + for values in raw_lines[1:]: + if values and values[0] in ["{", "}", "~", ">"]: + lines.append(values) + else: + lines[-1] += "\\n" + values + return lines + + +def _format_lines(lines): + """Format the individual lines + + This will replace the '{', '}' and '~' characters of our mini + formatting language with the proper 'where ...', 'and ...' and ' + + ...' text, taking care of indentation along the way. + + Return a list of formatted lines. + """ + result = lines[:1] + stack = [0] + stackcnt = [0] + for line in lines[1:]: + if line.startswith("{"): + if stackcnt[-1]: + s = u"and " + else: + s = u"where " + stack.append(len(result)) + stackcnt[-1] += 1 + stackcnt.append(0) + result.append(u" +" + u" " * (len(stack) - 1) + s + line[1:]) + elif line.startswith("}"): + stack.pop() + stackcnt.pop() + result[stack[-1]] += line[1:] + else: + assert line[0] in ["~", ">"] + stack[-1] += 1 + indent = len(stack) if line.startswith("~") else len(stack) - 1 + result.append(u" " * indent + line[1:]) + assert len(stack) == 1 + return result + + +# Provide basestring in python3 +try: + basestring = basestring +except NameError: + basestring = str + + +def issequence(x): + return isinstance(x, Sequence) and not isinstance(x, basestring) + + +def istext(x): + return isinstance(x, basestring) + + +def isdict(x): + return isinstance(x, dict) + + +def isset(x): + return isinstance(x, (set, frozenset)) + + +def isdatacls(obj): + return getattr(obj, "__dataclass_fields__", None) is not None + + +def isattrs(obj): + return getattr(obj, "__attrs_attrs__", None) is not None + + +def isiterable(obj): + try: + iter(obj) + return not istext(obj) + except TypeError: + return False + + +def assertrepr_compare(config, op, left, right): + """Return specialised explanations for some operators/operands""" + width = 80 - 15 - len(op) - 2 # 15 chars indentation, 1 space around op + left_repr = saferepr(left, maxsize=int(width // 2)) + right_repr = saferepr(right, maxsize=width - len(left_repr)) + + summary = u"%s %s %s" % (ecu(left_repr), op, ecu(right_repr)) + + verbose = config.getoption("verbose") + explanation = None + try: + if op == "==": + if istext(left) and istext(right): + explanation = _diff_text(left, right, verbose) + else: + if issequence(left) and issequence(right): + explanation = _compare_eq_sequence(left, right, verbose) + elif isset(left) and isset(right): + explanation = _compare_eq_set(left, right, verbose) + elif isdict(left) and isdict(right): + explanation = _compare_eq_dict(left, right, verbose) + elif type(left) == type(right) and (isdatacls(left) or isattrs(left)): + type_fn = (isdatacls, isattrs) + explanation = _compare_eq_cls(left, right, verbose, type_fn) + elif verbose > 0: + explanation = _compare_eq_verbose(left, right) + if isiterable(left) and isiterable(right): + expl = _compare_eq_iterable(left, right, verbose) + if explanation is not None: + explanation.extend(expl) + else: + explanation = expl + elif op == 
"not in": + if istext(left) and istext(right): + explanation = _notin_text(left, right, verbose) + except outcomes.Exit: + raise + except Exception: + explanation = [ + u"(pytest_assertion plugin: representation of details failed. " + u"Probably an object has a faulty __repr__.)", + six.text_type(_pytest._code.ExceptionInfo.from_current()), + ] + + if not explanation: + return None + + return [summary] + explanation + + +def _diff_text(left, right, verbose=0): + """Return the explanation for the diff between text or bytes. + + Unless --verbose is used this will skip leading and trailing + characters which are identical to keep the diff minimal. + + If the input are bytes they will be safely converted to text. + """ + from difflib import ndiff + + explanation = [] + + def escape_for_readable_diff(binary_text): + """ + Ensures that the internal string is always valid unicode, converting any bytes safely to valid unicode. + This is done using repr() which then needs post-processing to fix the encompassing quotes and un-escape + newlines and carriage returns (#429). + """ + r = six.text_type(repr(binary_text)[1:-1]) + r = r.replace(r"\n", "\n") + r = r.replace(r"\r", "\r") + return r + + if isinstance(left, bytes): + left = escape_for_readable_diff(left) + if isinstance(right, bytes): + right = escape_for_readable_diff(right) + if verbose < 1: + i = 0 # just in case left or right has zero length + for i in range(min(len(left), len(right))): + if left[i] != right[i]: + break + if i > 42: + i -= 10 # Provide some context + explanation = [ + u"Skipping %s identical leading characters in diff, use -v to show" % i + ] + left = left[i:] + right = right[i:] + if len(left) == len(right): + for i in range(len(left)): + if left[-i] != right[-i]: + break + if i > 42: + i -= 10 # Provide some context + explanation += [ + u"Skipping {} identical trailing " + u"characters in diff, use -v to show".format(i) + ] + left = left[:-i] + right = right[:-i] + keepends = True + if left.isspace() or right.isspace(): + left = repr(str(left)) + right = repr(str(right)) + explanation += [u"Strings contain only whitespace, escaping them using repr()"] + explanation += [ + line.strip("\n") + for line in ndiff(left.splitlines(keepends), right.splitlines(keepends)) + ] + return explanation + + +def _compare_eq_verbose(left, right): + keepends = True + left_lines = repr(left).splitlines(keepends) + right_lines = repr(right).splitlines(keepends) + + explanation = [] + explanation += [u"-" + line for line in left_lines] + explanation += [u"+" + line for line in right_lines] + + return explanation + + +def _compare_eq_iterable(left, right, verbose=0): + if not verbose: + return [u"Use -v to get the full diff"] + # dynamic import to speedup pytest + import difflib + + try: + left_formatting = pprint.pformat(left).splitlines() + right_formatting = pprint.pformat(right).splitlines() + explanation = [u"Full diff:"] + except Exception: + # hack: PrettyPrinter.pformat() in python 2 fails when formatting items that can't be sorted(), ie, calling + # sorted() on a list would raise. See issue #718. + # As a workaround, the full diff is generated by using the repr() string of each item of each container. 
+ left_formatting = sorted(repr(x) for x in left) + right_formatting = sorted(repr(x) for x in right) + explanation = [u"Full diff (fallback to calling repr on each item):"] + explanation.extend( + line.strip() for line in difflib.ndiff(left_formatting, right_formatting) + ) + return explanation + + +def _compare_eq_sequence(left, right, verbose=0): + explanation = [] + len_left = len(left) + len_right = len(right) + for i in range(min(len_left, len_right)): + if left[i] != right[i]: + explanation += [u"At index %s diff: %r != %r" % (i, left[i], right[i])] + break + len_diff = len_left - len_right + + if len_diff: + if len_diff > 0: + dir_with_more = "Left" + extra = saferepr(left[len_right]) + else: + len_diff = 0 - len_diff + dir_with_more = "Right" + extra = saferepr(right[len_left]) + + if len_diff == 1: + explanation += [u"%s contains one more item: %s" % (dir_with_more, extra)] + else: + explanation += [ + u"%s contains %d more items, first extra item: %s" + % (dir_with_more, len_diff, extra) + ] + return explanation + + +def _compare_eq_set(left, right, verbose=0): + explanation = [] + diff_left = left - right + diff_right = right - left + if diff_left: + explanation.append(u"Extra items in the left set:") + for item in diff_left: + explanation.append(saferepr(item)) + if diff_right: + explanation.append(u"Extra items in the right set:") + for item in diff_right: + explanation.append(saferepr(item)) + return explanation + + +def _compare_eq_dict(left, right, verbose=0): + explanation = [] + set_left = set(left) + set_right = set(right) + common = set_left.intersection(set_right) + same = {k: left[k] for k in common if left[k] == right[k]} + if same and verbose < 2: + explanation += [u"Omitting %s identical items, use -vv to show" % len(same)] + elif same: + explanation += [u"Common items:"] + explanation += pprint.pformat(same).splitlines() + diff = {k for k in common if left[k] != right[k]} + if diff: + explanation += [u"Differing items:"] + for k in diff: + explanation += [saferepr({k: left[k]}) + " != " + saferepr({k: right[k]})] + extra_left = set_left - set_right + len_extra_left = len(extra_left) + if len_extra_left: + explanation.append( + u"Left contains %d more item%s:" + % (len_extra_left, "" if len_extra_left == 1 else "s") + ) + explanation.extend( + pprint.pformat({k: left[k] for k in extra_left}).splitlines() + ) + extra_right = set_right - set_left + len_extra_right = len(extra_right) + if len_extra_right: + explanation.append( + u"Right contains %d more item%s:" + % (len_extra_right, "" if len_extra_right == 1 else "s") + ) + explanation.extend( + pprint.pformat({k: right[k] for k in extra_right}).splitlines() + ) + return explanation + + +def _compare_eq_cls(left, right, verbose, type_fns): + isdatacls, isattrs = type_fns + if isdatacls(left): + all_fields = left.__dataclass_fields__ + fields_to_check = [field for field, info in all_fields.items() if info.compare] + elif isattrs(left): + all_fields = left.__attrs_attrs__ + fields_to_check = [ + field.name for field in all_fields if getattr(field, ATTRS_EQ_FIELD) + ] + + same = [] + diff = [] + for field in fields_to_check: + if getattr(left, field) == getattr(right, field): + same.append(field) + else: + diff.append(field) + + explanation = [] + if same and verbose < 2: + explanation.append(u"Omitting %s identical items, use -vv to show" % len(same)) + elif same: + explanation += [u"Matching attributes:"] + explanation += pprint.pformat(same).splitlines() + if diff: + explanation += [u"Differing attributes:"] + for 
field in diff: + explanation += [ + (u"%s: %r != %r") % (field, getattr(left, field), getattr(right, field)) + ] + return explanation + + +def _notin_text(term, text, verbose=0): + index = text.find(term) + head = text[:index] + tail = text[index + len(term) :] + correct_text = head + tail + diff = _diff_text(correct_text, text, verbose) + newdiff = [u"%s is contained here:" % saferepr(term, maxsize=42)] + for line in diff: + if line.startswith(u"Skipping"): + continue + if line.startswith(u"- "): + continue + if line.startswith(u"+ "): + newdiff.append(u" " + line[2:]) + else: + newdiff.append(line) + return newdiff diff --git a/venv/lib/python2.7/site-packages/_pytest/cacheprovider.py b/venv/lib/python2.7/site-packages/_pytest/cacheprovider.py new file mode 100644 index 0000000..f5c5545 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/cacheprovider.py @@ -0,0 +1,438 @@ +# -*- coding: utf-8 -*- +""" +merged implementation of the cache provider + +the name cache was not chosen to ensure pluggy automatically +ignores the external pytest-cache +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import json +import os +from collections import OrderedDict + +import attr +import py +import six + +import pytest +from .compat import _PY2 as PY2 +from .pathlib import Path +from .pathlib import resolve_from_str +from .pathlib import rm_rf + +README_CONTENT = u"""\ +# pytest cache directory # + +This directory contains data from the pytest's cache plugin, +which provides the `--lf` and `--ff` options, as well as the `cache` fixture. + +**Do not** commit this to version control. + +See [the docs](https://docs.pytest.org/en/latest/cache.html) for more information. +""" + +CACHEDIR_TAG_CONTENT = b"""\ +Signature: 8a477f597d28d172789f06886806bc55 +# This file is a cache directory tag created by pytest. +# For information about cache directory tags, see: +# http://www.bford.info/cachedir/spec.html +""" + + +@attr.s +class Cache(object): + _cachedir = attr.ib(repr=False) + _config = attr.ib(repr=False) + + @classmethod + def for_config(cls, config): + cachedir = cls.cache_dir_from_config(config) + if config.getoption("cacheclear") and cachedir.exists(): + rm_rf(cachedir) + cachedir.mkdir() + return cls(cachedir, config) + + @staticmethod + def cache_dir_from_config(config): + return resolve_from_str(config.getini("cache_dir"), config.rootdir) + + def warn(self, fmt, **args): + from _pytest.warnings import _issue_warning_captured + from _pytest.warning_types import PytestCacheWarning + + _issue_warning_captured( + PytestCacheWarning(fmt.format(**args) if args else fmt), + self._config.hook, + stacklevel=3, + ) + + def makedir(self, name): + """ return a directory path object with the given name. If the + directory does not yet exist, it will be created. You can use it + to manage files likes e. g. store/retrieve database + dumps across test sessions. + + :param name: must be a string not containing a ``/`` separator. + Make sure the name contains your plugin or application + identifiers to prevent clashes with other cache users. + """ + name = Path(name) + if len(name.parts) > 1: + raise ValueError("name is not allowed to contain path separators") + res = self._cachedir.joinpath("d", name) + res.mkdir(exist_ok=True, parents=True) + return py.path.local(res) + + def _getvaluepath(self, key): + return self._cachedir.joinpath("v", Path(key)) + + def get(self, key, default): + """ return cached value for the given key. 
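
A usage sketch for the get()/set() pair documented here, written as a conftest.py hook; "myplugin/runs" is an illustrative key:

    # conftest.py
    def pytest_configure(config):
        # Count how many times this test suite has been run on this machine.
        runs = config.cache.get("myplugin/runs", 0)
        config.cache.set("myplugin/runs", runs + 1)
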
If no value + was yet cached or the value cannot be read, the specified + default is returned. + + :param key: must be a ``/`` separated value. Usually the first + name is the name of your plugin or your application. + :param default: must be provided in case of a cache-miss or + invalid cache values. + + """ + path = self._getvaluepath(key) + try: + with path.open("r") as f: + return json.load(f) + except (ValueError, IOError, OSError): + return default + + def set(self, key, value): + """ save value for the given key. + + :param key: must be a ``/`` separated value. Usually the first + name is the name of your plugin or your application. + :param value: must be of any combination of basic + python types, including nested types + like e. g. lists of dictionaries. + """ + path = self._getvaluepath(key) + try: + if path.parent.is_dir(): + cache_dir_exists_already = True + else: + cache_dir_exists_already = self._cachedir.exists() + path.parent.mkdir(exist_ok=True, parents=True) + except (IOError, OSError): + self.warn("could not create cache path {path}", path=path) + return + if not cache_dir_exists_already: + self._ensure_supporting_files() + try: + f = path.open("wb" if PY2 else "w") + except (IOError, OSError): + self.warn("cache could not write path {path}", path=path) + else: + with f: + json.dump(value, f, indent=2, sort_keys=True) + + def _ensure_supporting_files(self): + """Create supporting files in the cache dir that are not really part of the cache.""" + readme_path = self._cachedir / "README.md" + readme_path.write_text(README_CONTENT) + + gitignore_path = self._cachedir.joinpath(".gitignore") + msg = u"# Created by pytest automatically.\n*" + gitignore_path.write_text(msg, encoding="UTF-8") + + cachedir_tag_path = self._cachedir.joinpath("CACHEDIR.TAG") + cachedir_tag_path.write_bytes(CACHEDIR_TAG_CONTENT) + + +class LFPlugin(object): + """ Plugin which implements the --lf (run last-failing) option """ + + def __init__(self, config): + self.config = config + active_keys = "lf", "failedfirst" + self.active = any(config.getoption(key) for key in active_keys) + self.lastfailed = config.cache.get("cache/lastfailed", {}) + self._previously_failed_count = None + self._report_status = None + self._skipped_files = 0 # count skipped files during collection due to --lf + + def last_failed_paths(self): + """Returns a set with all Paths()s of the previously failed nodeids (cached). + """ + try: + return self._last_failed_paths + except AttributeError: + rootpath = Path(self.config.rootdir) + result = {rootpath / nodeid.split("::")[0] for nodeid in self.lastfailed} + result = {x for x in result if x.exists()} + self._last_failed_paths = result + return result + + def pytest_ignore_collect(self, path): + """ + Ignore this file path if we are in --lf mode and it is not in the list of + previously failed files. 
+ """ + if self.active and self.config.getoption("lf") and path.isfile(): + last_failed_paths = self.last_failed_paths() + if last_failed_paths: + skip_it = Path(path) not in self.last_failed_paths() + if skip_it: + self._skipped_files += 1 + return skip_it + + def pytest_report_collectionfinish(self): + if self.active and self.config.getoption("verbose") >= 0: + return "run-last-failure: %s" % self._report_status + + def pytest_runtest_logreport(self, report): + if (report.when == "call" and report.passed) or report.skipped: + self.lastfailed.pop(report.nodeid, None) + elif report.failed: + self.lastfailed[report.nodeid] = True + + def pytest_collectreport(self, report): + passed = report.outcome in ("passed", "skipped") + if passed: + if report.nodeid in self.lastfailed: + self.lastfailed.pop(report.nodeid) + self.lastfailed.update((item.nodeid, True) for item in report.result) + else: + self.lastfailed[report.nodeid] = True + + def pytest_collection_modifyitems(self, session, config, items): + if not self.active: + return + + if self.lastfailed: + previously_failed = [] + previously_passed = [] + for item in items: + if item.nodeid in self.lastfailed: + previously_failed.append(item) + else: + previously_passed.append(item) + self._previously_failed_count = len(previously_failed) + + if not previously_failed: + # Running a subset of all tests with recorded failures + # only outside of it. + self._report_status = "%d known failures not in selected tests" % ( + len(self.lastfailed), + ) + else: + if self.config.getoption("lf"): + items[:] = previously_failed + config.hook.pytest_deselected(items=previously_passed) + else: # --failedfirst + items[:] = previously_failed + previously_passed + + noun = "failure" if self._previously_failed_count == 1 else "failures" + suffix = " first" if self.config.getoption("failedfirst") else "" + self._report_status = "rerun previous {count} {noun}{suffix}".format( + count=self._previously_failed_count, suffix=suffix, noun=noun + ) + + if self._skipped_files > 0: + files_noun = "file" if self._skipped_files == 1 else "files" + self._report_status += " (skipped {files} {files_noun})".format( + files=self._skipped_files, files_noun=files_noun + ) + else: + self._report_status = "no previously failed tests, " + if self.config.getoption("last_failed_no_failures") == "none": + self._report_status += "deselecting all items." + config.hook.pytest_deselected(items=items) + items[:] = [] + else: + self._report_status += "not deselecting items." 
+ + def pytest_sessionfinish(self, session): + config = self.config + if config.getoption("cacheshow") or hasattr(config, "slaveinput"): + return + + saved_lastfailed = config.cache.get("cache/lastfailed", {}) + if saved_lastfailed != self.lastfailed: + config.cache.set("cache/lastfailed", self.lastfailed) + + +class NFPlugin(object): + """ Plugin which implements the --nf (run new-first) option """ + + def __init__(self, config): + self.config = config + self.active = config.option.newfirst + self.cached_nodeids = config.cache.get("cache/nodeids", []) + + def pytest_collection_modifyitems(self, session, config, items): + if self.active: + new_items = OrderedDict() + other_items = OrderedDict() + for item in items: + if item.nodeid not in self.cached_nodeids: + new_items[item.nodeid] = item + else: + other_items[item.nodeid] = item + + items[:] = self._get_increasing_order( + six.itervalues(new_items) + ) + self._get_increasing_order(six.itervalues(other_items)) + self.cached_nodeids = [x.nodeid for x in items if isinstance(x, pytest.Item)] + + def _get_increasing_order(self, items): + return sorted(items, key=lambda item: item.fspath.mtime(), reverse=True) + + def pytest_sessionfinish(self, session): + config = self.config + if config.getoption("cacheshow") or hasattr(config, "slaveinput"): + return + + config.cache.set("cache/nodeids", self.cached_nodeids) + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group.addoption( + "--lf", + "--last-failed", + action="store_true", + dest="lf", + help="rerun only the tests that failed " + "at the last run (or all if none failed)", + ) + group.addoption( + "--ff", + "--failed-first", + action="store_true", + dest="failedfirst", + help="run all tests but run the last failures first. " + "This may re-order tests and thus lead to " + "repeated fixture setup/teardown", + ) + group.addoption( + "--nf", + "--new-first", + action="store_true", + dest="newfirst", + help="run tests from new files first, then the rest of the tests " + "sorted by file mtime", + ) + group.addoption( + "--cache-show", + action="append", + nargs="?", + dest="cacheshow", + help=( + "show cache contents, don't perform collection or tests. " + "Optional argument: glob (default: '*')." + ), + ) + group.addoption( + "--cache-clear", + action="store_true", + dest="cacheclear", + help="remove all cache contents at start of test run.", + ) + cache_dir_default = ".pytest_cache" + if "TOX_ENV_DIR" in os.environ: + cache_dir_default = os.path.join(os.environ["TOX_ENV_DIR"], cache_dir_default) + parser.addini("cache_dir", default=cache_dir_default, help="cache directory path.") + group.addoption( + "--lfnf", + "--last-failed-no-failures", + action="store", + dest="last_failed_no_failures", + choices=("all", "none"), + default="all", + help="which tests to run with no previously (known) failures.", + ) + + +def pytest_cmdline_main(config): + if config.option.cacheshow: + from _pytest.main import wrap_session + + return wrap_session(config, cacheshow) + + +@pytest.hookimpl(tryfirst=True) +def pytest_configure(config): + config.cache = Cache.for_config(config) + config.pluginmanager.register(LFPlugin(config), "lfplugin") + config.pluginmanager.register(NFPlugin(config), "nfplugin") + + +@pytest.fixture +def cache(request): + """ + Return a cache object that can persist state between testing sessions. 
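
A test using this fixture, with the expensive part stubbed out:

    def test_memoized(cache):
        value = cache.get("example/value", None)
        if value is None:
            value = 42  # stand-in for an expensive computation
            cache.set("example/value", value)
        assert value == 42
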
+ + cache.get(key, default) + cache.set(key, value) + + Keys must be a ``/`` separated value, where the first part is usually the + name of your plugin or application to avoid clashes with other cache users. + + Values can be any object handled by the json stdlib module. + """ + return request.config.cache + + +def pytest_report_header(config): + """Display cachedir with --cache-show and if non-default.""" + if config.option.verbose > 0 or config.getini("cache_dir") != ".pytest_cache": + cachedir = config.cache._cachedir + # TODO: evaluate generating upward relative paths + # starting with .., ../.. if sensible + + try: + displaypath = cachedir.relative_to(config.rootdir) + except ValueError: + displaypath = cachedir + return "cachedir: {}".format(displaypath) + + +def cacheshow(config, session): + from pprint import pformat + + tw = py.io.TerminalWriter() + tw.line("cachedir: " + str(config.cache._cachedir)) + if not config.cache._cachedir.is_dir(): + tw.line("cache is empty") + return 0 + + glob = config.option.cacheshow[0] + if glob is None: + glob = "*" + + dummy = object() + basedir = config.cache._cachedir + vdir = basedir / "v" + tw.sep("-", "cache values for %r" % glob) + for valpath in sorted(x for x in vdir.rglob(glob) if x.is_file()): + key = valpath.relative_to(vdir) + val = config.cache.get(key, dummy) + if val is dummy: + tw.line("%s contains unreadable content, will be ignored" % key) + else: + tw.line("%s contains:" % key) + for line in pformat(val).splitlines(): + tw.line(" " + line) + + ddir = basedir / "d" + if ddir.is_dir(): + contents = sorted(ddir.rglob(glob)) + tw.sep("-", "cache directories for %r" % glob) + for p in contents: + # if p.check(dir=1): + # print("%s/" % p.relto(basedir)) + if p.is_file(): + key = p.relative_to(basedir) + tw.line("{} is a file of length {:d}".format(key, p.stat().st_size)) + return 0 diff --git a/venv/lib/python2.7/site-packages/_pytest/capture.py b/venv/lib/python2.7/site-packages/_pytest/capture.py new file mode 100644 index 0000000..68c1777 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/capture.py @@ -0,0 +1,850 @@ +# -*- coding: utf-8 -*- +""" +per-test stdout/stderr capturing mechanism. 
+ +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import collections +import contextlib +import io +import os +import sys +from io import UnsupportedOperation +from tempfile import TemporaryFile + +import six + +import pytest +from _pytest.compat import _PY3 +from _pytest.compat import CaptureIO + +patchsysdict = {0: "stdin", 1: "stdout", 2: "stderr"} + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group._addoption( + "--capture", + action="store", + default="fd" if hasattr(os, "dup") else "sys", + metavar="method", + choices=["fd", "sys", "no"], + help="per-test capturing method: one of fd|sys|no.", + ) + group._addoption( + "-s", + action="store_const", + const="no", + dest="capture", + help="shortcut for --capture=no.", + ) + + +@pytest.hookimpl(hookwrapper=True) +def pytest_load_initial_conftests(early_config, parser, args): + ns = early_config.known_args_namespace + if ns.capture == "fd": + _py36_windowsconsoleio_workaround(sys.stdout) + _colorama_workaround() + _readline_workaround() + pluginmanager = early_config.pluginmanager + capman = CaptureManager(ns.capture) + pluginmanager.register(capman, "capturemanager") + + # make sure that capturemanager is properly reset at final shutdown + early_config.add_cleanup(capman.stop_global_capturing) + + # finally trigger conftest loading but while capturing (issue93) + capman.start_global_capturing() + outcome = yield + capman.suspend_global_capture() + if outcome.excinfo is not None: + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stderr.write(err) + + +class CaptureManager(object): + """ + Capture plugin, manages that the appropriate capture method is enabled/disabled during collection and each + test phase (setup, call, teardown). After each of those points, the captured output is obtained and + attached to the collection/runtest report. + + There are two levels of capture: + * global: which is enabled by default and can be suppressed by the ``-s`` option. This is always enabled/disabled + during collection and each test phase. + * fixture: when a test function or one of its fixture depend on the ``capsys`` or ``capfd`` fixtures. In this + case special handling is needed to ensure the fixtures take precedence over the global capture. 
+ """ + + def __init__(self, method): + self._method = method + self._global_capturing = None + self._current_item = None + + def __repr__(self): + return "" % ( + self._method, + self._global_capturing, + self._current_item, + ) + + def _getcapture(self, method): + if method == "fd": + return MultiCapture(out=True, err=True, Capture=FDCapture) + elif method == "sys": + return MultiCapture(out=True, err=True, Capture=SysCapture) + elif method == "no": + return MultiCapture(out=False, err=False, in_=False) + raise ValueError("unknown capturing method: %r" % method) # pragma: no cover + + def is_capturing(self): + if self.is_globally_capturing(): + return "global" + capture_fixture = getattr(self._current_item, "_capture_fixture", None) + if capture_fixture is not None: + return ( + "fixture %s" % self._current_item._capture_fixture.request.fixturename + ) + return False + + # Global capturing control + + def is_globally_capturing(self): + return self._method != "no" + + def start_global_capturing(self): + assert self._global_capturing is None + self._global_capturing = self._getcapture(self._method) + self._global_capturing.start_capturing() + + def stop_global_capturing(self): + if self._global_capturing is not None: + self._global_capturing.pop_outerr_to_orig() + self._global_capturing.stop_capturing() + self._global_capturing = None + + def resume_global_capture(self): + # During teardown of the python process, and on rare occasions, capture + # attributes can be `None` while trying to resume global capture. + if self._global_capturing is not None: + self._global_capturing.resume_capturing() + + def suspend_global_capture(self, in_=False): + cap = getattr(self, "_global_capturing", None) + if cap is not None: + cap.suspend_capturing(in_=in_) + + def suspend(self, in_=False): + # Need to undo local capsys-et-al if it exists before disabling global capture. + self.suspend_fixture(self._current_item) + self.suspend_global_capture(in_) + + def resume(self): + self.resume_global_capture() + self.resume_fixture(self._current_item) + + def read_global_capture(self): + return self._global_capturing.readouterr() + + # Fixture Control (it's just forwarding, think about removing this later) + + def activate_fixture(self, item): + """If the current item is using ``capsys`` or ``capfd``, activate them so they take precedence over + the global capture. 
+ """ + fixture = getattr(item, "_capture_fixture", None) + if fixture is not None: + fixture._start() + + def deactivate_fixture(self, item): + """Deactivates the ``capsys`` or ``capfd`` fixture of this item, if any.""" + fixture = getattr(item, "_capture_fixture", None) + if fixture is not None: + fixture.close() + + def suspend_fixture(self, item): + fixture = getattr(item, "_capture_fixture", None) + if fixture is not None: + fixture._suspend() + + def resume_fixture(self, item): + fixture = getattr(item, "_capture_fixture", None) + if fixture is not None: + fixture._resume() + + # Helper context managers + + @contextlib.contextmanager + def global_and_fixture_disabled(self): + """Context manager to temporarily disable global and current fixture capturing.""" + self.suspend() + try: + yield + finally: + self.resume() + + @contextlib.contextmanager + def item_capture(self, when, item): + self.resume_global_capture() + self.activate_fixture(item) + try: + yield + finally: + self.deactivate_fixture(item) + self.suspend_global_capture(in_=False) + + out, err = self.read_global_capture() + item.add_report_section(when, "stdout", out) + item.add_report_section(when, "stderr", err) + + # Hooks + + @pytest.hookimpl(hookwrapper=True) + def pytest_make_collect_report(self, collector): + if isinstance(collector, pytest.File): + self.resume_global_capture() + outcome = yield + self.suspend_global_capture() + out, err = self.read_global_capture() + rep = outcome.get_result() + if out: + rep.sections.append(("Captured stdout", out)) + if err: + rep.sections.append(("Captured stderr", err)) + else: + yield + + @pytest.hookimpl(hookwrapper=True) + def pytest_runtest_protocol(self, item): + self._current_item = item + yield + self._current_item = None + + @pytest.hookimpl(hookwrapper=True) + def pytest_runtest_setup(self, item): + with self.item_capture("setup", item): + yield + + @pytest.hookimpl(hookwrapper=True) + def pytest_runtest_call(self, item): + with self.item_capture("call", item): + yield + + @pytest.hookimpl(hookwrapper=True) + def pytest_runtest_teardown(self, item): + with self.item_capture("teardown", item): + yield + + @pytest.hookimpl(tryfirst=True) + def pytest_keyboard_interrupt(self, excinfo): + self.stop_global_capturing() + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(self, excinfo): + self.stop_global_capturing() + + +capture_fixtures = {"capfd", "capfdbinary", "capsys", "capsysbinary"} + + +def _ensure_only_one_capture_fixture(request, name): + fixtures = set(request.fixturenames) & capture_fixtures - {name} + if fixtures: + fixtures = sorted(fixtures) + fixtures = fixtures[0] if len(fixtures) == 1 else fixtures + raise request.raiseerror( + "cannot use {} and {} at the same time".format(fixtures, name) + ) + + +@pytest.fixture +def capsys(request): + """Enable text capturing of writes to ``sys.stdout`` and ``sys.stderr``. + + The captured output is made available via ``capsys.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``text`` objects. + """ + _ensure_only_one_capture_fixture(request, "capsys") + with _install_capture_fixture_on_item(request, SysCapture) as fixture: + yield fixture + + +@pytest.fixture +def capsysbinary(request): + """Enable bytes capturing of writes to ``sys.stdout`` and ``sys.stderr``. + + The captured output is made available via ``capsysbinary.readouterr()`` + method calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``bytes`` objects. 
+ """ + _ensure_only_one_capture_fixture(request, "capsysbinary") + # Currently, the implementation uses the python3 specific `.buffer` + # property of CaptureIO. + if sys.version_info < (3,): + raise request.raiseerror("capsysbinary is only supported on Python 3") + with _install_capture_fixture_on_item(request, SysCaptureBinary) as fixture: + yield fixture + + +@pytest.fixture +def capfd(request): + """Enable text capturing of writes to file descriptors ``1`` and ``2``. + + The captured output is made available via ``capfd.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``text`` objects. + """ + _ensure_only_one_capture_fixture(request, "capfd") + if not hasattr(os, "dup"): + pytest.skip( + "capfd fixture needs os.dup function which is not available in this system" + ) + with _install_capture_fixture_on_item(request, FDCapture) as fixture: + yield fixture + + +@pytest.fixture +def capfdbinary(request): + """Enable bytes capturing of writes to file descriptors ``1`` and ``2``. + + The captured output is made available via ``capfd.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``byte`` objects. + """ + _ensure_only_one_capture_fixture(request, "capfdbinary") + if not hasattr(os, "dup"): + pytest.skip( + "capfdbinary fixture needs os.dup function which is not available in this system" + ) + with _install_capture_fixture_on_item(request, FDCaptureBinary) as fixture: + yield fixture + + +@contextlib.contextmanager +def _install_capture_fixture_on_item(request, capture_class): + """ + Context manager which creates a ``CaptureFixture`` instance and "installs" it on + the item/node of the given request. Used by ``capsys`` and ``capfd``. + + The CaptureFixture is added as attribute of the item because it needs to accessed + by ``CaptureManager`` during its ``pytest_runtest_*`` hooks. + """ + request.node._capture_fixture = fixture = CaptureFixture(capture_class, request) + capmanager = request.config.pluginmanager.getplugin("capturemanager") + # Need to active this fixture right away in case it is being used by another fixture (setup phase). + # If this fixture is being used only by a test function (call phase), then we wouldn't need this + # activation, but it doesn't hurt. + capmanager.activate_fixture(request.node) + yield fixture + fixture.close() + del request.node._capture_fixture + + +class CaptureFixture(object): + """ + Object returned by :py:func:`capsys`, :py:func:`capsysbinary`, :py:func:`capfd` and :py:func:`capfdbinary` + fixtures. + """ + + def __init__(self, captureclass, request): + self.captureclass = captureclass + self.request = request + self._capture = None + self._captured_out = self.captureclass.EMPTY_BUFFER + self._captured_err = self.captureclass.EMPTY_BUFFER + + def _start(self): + if self._capture is None: + self._capture = MultiCapture( + out=True, err=True, in_=False, Capture=self.captureclass + ) + self._capture.start_capturing() + + def close(self): + if self._capture is not None: + out, err = self._capture.pop_outerr_to_orig() + self._captured_out += out + self._captured_err += err + self._capture.stop_capturing() + self._capture = None + + def readouterr(self): + """Read and return the captured output so far, resetting the internal buffer. 
+ + :return: captured content as a namedtuple with ``out`` and ``err`` string attributes + """ + captured_out, captured_err = self._captured_out, self._captured_err + if self._capture is not None: + out, err = self._capture.readouterr() + captured_out += out + captured_err += err + self._captured_out = self.captureclass.EMPTY_BUFFER + self._captured_err = self.captureclass.EMPTY_BUFFER + return CaptureResult(captured_out, captured_err) + + def _suspend(self): + """Suspends this fixture's own capturing temporarily.""" + if self._capture is not None: + self._capture.suspend_capturing() + + def _resume(self): + """Resumes this fixture's own capturing temporarily.""" + if self._capture is not None: + self._capture.resume_capturing() + + @contextlib.contextmanager + def disabled(self): + """Temporarily disables capture while inside the 'with' block.""" + capmanager = self.request.config.pluginmanager.getplugin("capturemanager") + with capmanager.global_and_fixture_disabled(): + yield + + +def safe_text_dupfile(f, mode, default_encoding="UTF8"): + """ return an open text file object that's a duplicate of f on the + FD-level if possible. + """ + encoding = getattr(f, "encoding", None) + try: + fd = f.fileno() + except Exception: + if "b" not in getattr(f, "mode", "") and hasattr(f, "encoding"): + # we seem to have a text stream, let's just use it + return f + else: + newfd = os.dup(fd) + if "b" not in mode: + mode += "b" + f = os.fdopen(newfd, mode, 0) # no buffering + return EncodedFile(f, encoding or default_encoding) + + +class EncodedFile(object): + errors = "strict" # possibly needed by py3 code (issue555) + + def __init__(self, buffer, encoding): + self.buffer = buffer + self.encoding = encoding + + def write(self, obj): + if isinstance(obj, six.text_type): + obj = obj.encode(self.encoding, "replace") + elif _PY3: + raise TypeError( + "write() argument must be str, not {}".format(type(obj).__name__) + ) + self.buffer.write(obj) + + def writelines(self, linelist): + data = "".join(linelist) + self.write(data) + + @property + def name(self): + """Ensure that file.name is a string.""" + return repr(self.buffer) + + @property + def mode(self): + return self.buffer.mode.replace("b", "") + + def __getattr__(self, name): + return getattr(object.__getattribute__(self, "buffer"), name) + + +CaptureResult = collections.namedtuple("CaptureResult", ["out", "err"]) + + +class MultiCapture(object): + out = err = in_ = None + _state = None + + def __init__(self, out=True, err=True, in_=True, Capture=None): + if in_: + self.in_ = Capture(0) + if out: + self.out = Capture(1) + if err: + self.err = Capture(2) + + def __repr__(self): + return "" % ( + self.out, + self.err, + self.in_, + self._state, + getattr(self, "_in_suspended", ""), + ) + + def start_capturing(self): + self._state = "started" + if self.in_: + self.in_.start() + if self.out: + self.out.start() + if self.err: + self.err.start() + + def pop_outerr_to_orig(self): + """ pop current snapshot out/err capture and flush to orig streams. 
""" + out, err = self.readouterr() + if out: + self.out.writeorg(out) + if err: + self.err.writeorg(err) + return out, err + + def suspend_capturing(self, in_=False): + self._state = "suspended" + if self.out: + self.out.suspend() + if self.err: + self.err.suspend() + if in_ and self.in_: + self.in_.suspend() + self._in_suspended = True + + def resume_capturing(self): + self._state = "resumed" + if self.out: + self.out.resume() + if self.err: + self.err.resume() + if hasattr(self, "_in_suspended"): + self.in_.resume() + del self._in_suspended + + def stop_capturing(self): + """ stop capturing and reset capturing streams """ + if self._state == "stopped": + raise ValueError("was already stopped") + self._state = "stopped" + if self.out: + self.out.done() + if self.err: + self.err.done() + if self.in_: + self.in_.done() + + def readouterr(self): + """ return snapshot unicode value of stdout/stderr capturings. """ + return CaptureResult( + self.out.snap() if self.out is not None else "", + self.err.snap() if self.err is not None else "", + ) + + +class NoCapture(object): + EMPTY_BUFFER = None + __init__ = start = done = suspend = resume = lambda *args: None + + +class FDCaptureBinary(object): + """Capture IO to/from a given os-level filedescriptor. + + snap() produces `bytes` + """ + + EMPTY_BUFFER = b"" + _state = None + + def __init__(self, targetfd, tmpfile=None): + self.targetfd = targetfd + try: + self.targetfd_save = os.dup(self.targetfd) + except OSError: + self.start = lambda: None + self.done = lambda: None + else: + if targetfd == 0: + assert not tmpfile, "cannot set tmpfile with stdin" + tmpfile = open(os.devnull, "r") + self.syscapture = SysCapture(targetfd) + else: + if tmpfile is None: + f = TemporaryFile() + with f: + tmpfile = safe_text_dupfile(f, mode="wb+") + if targetfd in patchsysdict: + self.syscapture = SysCapture(targetfd, tmpfile) + else: + self.syscapture = NoCapture() + self.tmpfile = tmpfile + self.tmpfile_fd = tmpfile.fileno() + + def __repr__(self): + return "" % ( + self.targetfd, + getattr(self, "targetfd_save", None), + self._state, + ) + + def start(self): + """ Start capturing on targetfd using memorized tmpfile. """ + try: + os.fstat(self.targetfd_save) + except (AttributeError, OSError): + raise ValueError("saved filedescriptor not valid anymore") + os.dup2(self.tmpfile_fd, self.targetfd) + self.syscapture.start() + self._state = "started" + + def snap(self): + self.tmpfile.seek(0) + res = self.tmpfile.read() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def done(self): + """ stop capturing, restore streams, return original capture file, + seeked to position zero. """ + targetfd_save = self.__dict__.pop("targetfd_save") + os.dup2(targetfd_save, self.targetfd) + os.close(targetfd_save) + self.syscapture.done() + _attempt_to_close_capture_file(self.tmpfile) + self._state = "done" + + def suspend(self): + self.syscapture.suspend() + os.dup2(self.targetfd_save, self.targetfd) + self._state = "suspended" + + def resume(self): + self.syscapture.resume() + os.dup2(self.tmpfile_fd, self.targetfd) + self._state = "resumed" + + def writeorg(self, data): + """ write to original file descriptor. """ + if isinstance(data, six.text_type): + data = data.encode("utf8") # XXX use encoding of original stream + os.write(self.targetfd_save, data) + + +class FDCapture(FDCaptureBinary): + """Capture IO to/from a given os-level filedescriptor. 
+
+ snap() produces text
+ """
+
+ EMPTY_BUFFER = str()
+
+ def snap(self):
+ res = super(FDCapture, self).snap()
+ enc = getattr(self.tmpfile, "encoding", None)
+ if enc and isinstance(res, bytes):
+ res = six.text_type(res, enc, "replace")
+ return res
+
+
+class SysCapture(object):
+
+ EMPTY_BUFFER = str()
+ _state = None
+
+ def __init__(self, fd, tmpfile=None):
+ name = patchsysdict[fd]
+ self._old = getattr(sys, name)
+ self.name = name
+ if tmpfile is None:
+ if name == "stdin":
+ tmpfile = DontReadFromInput()
+ else:
+ tmpfile = CaptureIO()
+ self.tmpfile = tmpfile
+
+ def __repr__(self):
+ return "<SysCapture %s _old=%r, tmpfile=%r _state=%r>" % (
+ self.name,
+ self._old,
+ self.tmpfile,
+ self._state,
+ )
+
+ def start(self):
+ setattr(sys, self.name, self.tmpfile)
+ self._state = "started"
+
+ def snap(self):
+ res = self.tmpfile.getvalue()
+ self.tmpfile.seek(0)
+ self.tmpfile.truncate()
+ return res
+
+ def done(self):
+ setattr(sys, self.name, self._old)
+ del self._old
+ _attempt_to_close_capture_file(self.tmpfile)
+ self._state = "done"
+
+ def suspend(self):
+ setattr(sys, self.name, self._old)
+ self._state = "suspended"
+
+ def resume(self):
+ setattr(sys, self.name, self.tmpfile)
+ self._state = "resumed"
+
+ def writeorg(self, data):
+ self._old.write(data)
+ self._old.flush()
+
+
+class SysCaptureBinary(SysCapture):
+ EMPTY_BUFFER = b""
+
+ def snap(self):
+ res = self.tmpfile.buffer.getvalue()
+ self.tmpfile.seek(0)
+ self.tmpfile.truncate()
+ return res
+
+
+class DontReadFromInput(six.Iterator):
+ """Temporary stub class. Ideally when stdin is accessed, the
+ capturing should be turned off, with possibly all data captured
+ so far sent to the screen. This should be configurable, though,
+ because in automated test runs it is better to crash than
+ hang indefinitely.
+ """
+
+ encoding = None
+
+ def read(self, *args):
+ raise IOError("reading from stdin while output is captured")
+
+ readline = read
+ readlines = read
+ __next__ = read
+
+ def __iter__(self):
+ return self
+
+ def fileno(self):
+ raise UnsupportedOperation("redirected stdin is pseudofile, has no fileno()")
+
+ def isatty(self):
+ return False
+
+ def close(self):
+ pass
+
+ @property
+ def buffer(self):
+ if sys.version_info >= (3, 0):
+ return self
+ else:
+ raise AttributeError("redirected stdin has no attribute buffer")
+
+
+def _colorama_workaround():
+ """
+ Ensure colorama is imported so that it attaches to the correct stdio
+ handles on Windows.
+
+ colorama uses the terminal on import time. So if something does the
+ first import of colorama while I/O capture is active, colorama will
+ fail in various ways.
+ """
+ if sys.platform.startswith("win32"):
+ try:
+ import colorama # noqa: F401
+ except ImportError:
+ pass
+
+
+def _readline_workaround():
+ """
+ Ensure readline is imported so that it attaches to the correct stdio
+ handles on Windows.
+
+ Pdb uses readline support where available--when not running from the Python
+ prompt, the readline module is not imported until running the pdb REPL. If
+ running pytest with the --pdb option this means the readline module is not
+ imported until after I/O capture has been started.
+
+ This is a problem for pyreadline, which is often used to implement readline
+ support on Windows, as it does not attach to the correct handles for stdout
+ and/or stdin if they have been redirected by the FDCapture mechanism. This
+ workaround ensures that readline is imported before I/O capture is setup so
+ that it can attach to the actual stdin/out for the console.
+ + See https://github.com/pytest-dev/pytest/pull/1281 + """ + if sys.platform.startswith("win32"): + try: + import readline # noqa: F401 + except ImportError: + pass + + +def _py36_windowsconsoleio_workaround(stream): + """ + Python 3.6 implemented unicode console handling for Windows. This works + by reading/writing to the raw console handle using + ``{Read,Write}ConsoleW``. + + The problem is that we are going to ``dup2`` over the stdio file + descriptors when doing ``FDCapture`` and this will ``CloseHandle`` the + handles used by Python to write to the console. Though there is still some + weirdness and the console handle seems to only be closed randomly and not + on the first call to ``CloseHandle``, or maybe it gets reopened with the + same handle value when we suspend capturing. + + The workaround in this case will reopen stdio with a different fd which + also means a different handle by replicating the logic in + "Py_lifecycle.c:initstdio/create_stdio". + + :param stream: in practice ``sys.stdout`` or ``sys.stderr``, but given + here as parameter for unittesting purposes. + + See https://github.com/pytest-dev/py/issues/103 + """ + if not sys.platform.startswith("win32") or sys.version_info[:2] < (3, 6): + return + + # bail out if ``stream`` doesn't seem like a proper ``io`` stream (#2666) + if not hasattr(stream, "buffer"): + return + + buffered = hasattr(stream.buffer, "raw") + raw_stdout = stream.buffer.raw if buffered else stream.buffer + + if not isinstance(raw_stdout, io._WindowsConsoleIO): + return + + def _reopen_stdio(f, mode): + if not buffered and mode[0] == "w": + buffering = 0 + else: + buffering = -1 + + return io.TextIOWrapper( + open(os.dup(f.fileno()), mode, buffering), + f.encoding, + f.errors, + f.newlines, + f.line_buffering, + ) + + sys.stdin = _reopen_stdio(sys.stdin, "rb") + sys.stdout = _reopen_stdio(sys.stdout, "wb") + sys.stderr = _reopen_stdio(sys.stderr, "wb") + + +def _attempt_to_close_capture_file(f): + """Suppress IOError when closing the temporary file used for capturing streams in py27 (#2370)""" + if six.PY2: + try: + f.close() + except IOError: + pass + else: + f.close() diff --git a/venv/lib/python2.7/site-packages/_pytest/compat.py b/venv/lib/python2.7/site-packages/_pytest/compat.py new file mode 100644 index 0000000..d0add53 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/compat.py @@ -0,0 +1,470 @@ +# -*- coding: utf-8 -*- +""" +python version compatibility code +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import codecs +import functools +import inspect +import re +import sys +from contextlib import contextmanager + +import attr +import py +import six +from six import text_type + +import _pytest +from _pytest._io.saferepr import saferepr +from _pytest.outcomes import fail +from _pytest.outcomes import TEST_OUTCOME + +try: + import enum +except ImportError: # pragma: no cover + # Only available in Python 3.4+ or as a backport + enum = None + +_PY3 = sys.version_info > (3, 0) +_PY2 = not _PY3 + + +if _PY3: + from inspect import signature, Parameter as Parameter +else: + from funcsigs import signature, Parameter as Parameter + +NOTSET = object() + +PY35 = sys.version_info[:2] >= (3, 5) +PY36 = sys.version_info[:2] >= (3, 6) +MODULE_NOT_FOUND_ERROR = "ModuleNotFoundError" if PY36 else "ImportError" + + +if _PY3: + from collections.abc import MutableMapping as MappingMixin + from collections.abc import Iterable, Mapping, Sequence, Sized +else: + # those raise 
DeprecationWarnings in Python >=3.7 + from collections import MutableMapping as MappingMixin # noqa + from collections import Iterable, Mapping, Sequence, Sized # noqa + + +if sys.version_info >= (3, 4): + from importlib.util import spec_from_file_location +else: + + def spec_from_file_location(*_, **__): + return None + + +if sys.version_info >= (3, 8): + from importlib import metadata as importlib_metadata # noqa +else: + import importlib_metadata # noqa + + +def _format_args(func): + return str(signature(func)) + + +isfunction = inspect.isfunction +isclass = inspect.isclass +# used to work around a python2 exception info leak +exc_clear = getattr(sys, "exc_clear", lambda: None) +# The type of re.compile objects is not exposed in Python. +REGEX_TYPE = type(re.compile("")) + + +def is_generator(func): + genfunc = inspect.isgeneratorfunction(func) + return genfunc and not iscoroutinefunction(func) + + +def iscoroutinefunction(func): + """Return True if func is a decorated coroutine function. + + Note: copied and modified from Python 3.5's builtin couroutines.py to avoid import asyncio directly, + which in turns also initializes the "logging" module as side-effect (see issue #8). + """ + return getattr(func, "_is_coroutine", False) or ( + hasattr(inspect, "iscoroutinefunction") and inspect.iscoroutinefunction(func) + ) + + +def getlocation(function, curdir): + function = get_real_func(function) + fn = py.path.local(inspect.getfile(function)) + lineno = function.__code__.co_firstlineno + if fn.relto(curdir): + fn = fn.relto(curdir) + return "%s:%d" % (fn, lineno + 1) + + +def num_mock_patch_args(function): + """ return number of arguments used up by mock arguments (if any) """ + patchings = getattr(function, "patchings", None) + if not patchings: + return 0 + mock_modules = [sys.modules.get("mock"), sys.modules.get("unittest.mock")] + if any(mock_modules): + sentinels = [m.DEFAULT for m in mock_modules if m is not None] + return len( + [p for p in patchings if not p.attribute_name and p.new in sentinels] + ) + return len(patchings) + + +def getfuncargnames(function, is_method=False, cls=None): + """Returns the names of a function's mandatory arguments. + + This should return the names of all function arguments that: + * Aren't bound to an instance or type as in instance or class methods. + * Don't have default values. + * Aren't bound with functools.partial. + * Aren't replaced with mocks. + + The is_method and cls arguments indicate that the function should + be treated as a bound method even though it's not unless, only in + the case of cls, the function is a static method. + + @RonnyPfannschmidt: This function should be refactored when we + revisit fixtures. The fixture mechanism should ask the node for + the fixture names, and not try to obtain directly from the + function object well after collection has occurred. + + """ + # The parameters attribute of a Signature object contains an + # ordered mapping of parameter names to Parameter instances. This + # creates a tuple of the names of the parameters that don't have + # defaults. 
+ try: + parameters = signature(function).parameters + except (ValueError, TypeError) as e: + fail( + "Could not determine arguments of {!r}: {}".format(function, e), + pytrace=False, + ) + + arg_names = tuple( + p.name + for p in parameters.values() + if ( + p.kind is Parameter.POSITIONAL_OR_KEYWORD + or p.kind is Parameter.KEYWORD_ONLY + ) + and p.default is Parameter.empty + ) + # If this function should be treated as a bound method even though + # it's passed as an unbound method or function, remove the first + # parameter name. + if is_method or ( + cls and not isinstance(cls.__dict__.get(function.__name__, None), staticmethod) + ): + arg_names = arg_names[1:] + # Remove any names that will be replaced with mocks. + if hasattr(function, "__wrapped__"): + arg_names = arg_names[num_mock_patch_args(function) :] + return arg_names + + +@contextmanager +def dummy_context_manager(): + """Context manager that does nothing, useful in situations where you might need an actual context manager or not + depending on some condition. Using this allow to keep the same code""" + yield + + +def get_default_arg_names(function): + # Note: this code intentionally mirrors the code at the beginning of getfuncargnames, + # to get the arguments which were excluded from its result because they had default values + return tuple( + p.name + for p in signature(function).parameters.values() + if p.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY) + and p.default is not Parameter.empty + ) + + +_non_printable_ascii_translate_table = { + i: u"\\x{:02x}".format(i) for i in range(128) if i not in range(32, 127) +} +_non_printable_ascii_translate_table.update( + {ord("\t"): u"\\t", ord("\r"): u"\\r", ord("\n"): u"\\n"} +) + + +def _translate_non_printable(s): + return s.translate(_non_printable_ascii_translate_table) + + +if _PY3: + STRING_TYPES = bytes, str + UNICODE_TYPES = six.text_type + + if PY35: + + def _bytes_to_ascii(val): + return val.decode("ascii", "backslashreplace") + + else: + + def _bytes_to_ascii(val): + if val: + # source: http://goo.gl/bGsnwC + encoded_bytes, _ = codecs.escape_encode(val) + return encoded_bytes.decode("ascii") + else: + # empty bytes crashes codecs.escape_encode (#1087) + return "" + + def ascii_escaped(val): + """If val is pure ascii, returns it as a str(). Otherwise, escapes + bytes objects into a sequence of escaped bytes: + + b'\xc3\xb4\xc5\xd6' -> u'\\xc3\\xb4\\xc5\\xd6' + + and escapes unicode objects into a sequence of escaped unicode + ids, e.g.: + + '4\\nV\\U00043efa\\x0eMXWB\\x1e\\u3028\\u15fd\\xcd\\U0007d944' + + note: + the obvious "v.decode('unicode-escape')" will return + valid utf-8 unicode if it finds them in bytes, but we + want to return escaped bytes for any byte, even if they match + a utf-8 string. + + """ + if isinstance(val, bytes): + ret = _bytes_to_ascii(val) + else: + ret = val.encode("unicode_escape").decode("ascii") + return _translate_non_printable(ret) + + +else: + STRING_TYPES = six.string_types + UNICODE_TYPES = six.text_type + + def ascii_escaped(val): + """In py2 bytes and str are the same type, so return if it's a bytes + object, return it unchanged if it is a full ascii string, + otherwise escape it into its binary form. + + If it's a unicode string, change the unicode characters into + unicode escapes. 
+ + """ + if isinstance(val, bytes): + try: + ret = val.decode("ascii") + except UnicodeDecodeError: + ret = val.encode("string-escape").decode("ascii") + else: + ret = val.encode("unicode-escape").decode("ascii") + return _translate_non_printable(ret) + + +class _PytestWrapper(object): + """Dummy wrapper around a function object for internal use only. + + Used to correctly unwrap the underlying function object + when we are creating fixtures, because we wrap the function object ourselves with a decorator + to issue warnings when the fixture function is called directly. + """ + + def __init__(self, obj): + self.obj = obj + + +def get_real_func(obj): + """ gets the real function object of the (possibly) wrapped object by + functools.wraps or functools.partial. + """ + start_obj = obj + for i in range(100): + # __pytest_wrapped__ is set by @pytest.fixture when wrapping the fixture function + # to trigger a warning if it gets called directly instead of by pytest: we don't + # want to unwrap further than this otherwise we lose useful wrappings like @mock.patch (#3774) + new_obj = getattr(obj, "__pytest_wrapped__", None) + if isinstance(new_obj, _PytestWrapper): + obj = new_obj.obj + break + new_obj = getattr(obj, "__wrapped__", None) + if new_obj is None: + break + obj = new_obj + else: + raise ValueError( + ("could not find real function of {start}\nstopped at {current}").format( + start=saferepr(start_obj), current=saferepr(obj) + ) + ) + if isinstance(obj, functools.partial): + obj = obj.func + return obj + + +def get_real_method(obj, holder): + """ + Attempts to obtain the real function object that might be wrapping ``obj``, while at the same time + returning a bound method to ``holder`` if the original object was a bound method. + """ + try: + is_method = hasattr(obj, "__func__") + obj = get_real_func(obj) + except Exception: + return obj + if is_method and hasattr(obj, "__get__") and callable(obj.__get__): + obj = obj.__get__(holder) + return obj + + +def getfslineno(obj): + # xxx let decorators etc specify a sane ordering + obj = get_real_func(obj) + if hasattr(obj, "place_as"): + obj = obj.place_as + fslineno = _pytest._code.getfslineno(obj) + assert isinstance(fslineno[1], int), obj + return fslineno + + +def getimfunc(func): + try: + return func.__func__ + except AttributeError: + return func + + +def safe_getattr(object, name, default): + """ Like getattr but return default upon any Exception or any OutcomeException. + + Attribute access can potentially fail for 'evil' Python objects. + See issue #214. + It catches OutcomeException because of #2490 (issue #580), new outcomes are derived from BaseException + instead of Exception (for more details check #2707) + """ + try: + return getattr(object, name, default) + except TEST_OUTCOME: + return default + + +def safe_isclass(obj): + """Ignore any exception via isinstance on Python 3.""" + try: + return isclass(obj) + except Exception: + return False + + +def _is_unittest_unexpected_success_a_failure(): + """Return if the test suite should fail if an @expectedFailure unittest test PASSES. + + From https://docs.python.org/3/library/unittest.html?highlight=unittest#unittest.TestResult.wasSuccessful: + Changed in version 3.4: Returns False if there were any + unexpectedSuccesses from tests marked with the expectedFailure() decorator. 
+ """ + return sys.version_info >= (3, 4) + + +if _PY3: + + def safe_str(v): + """returns v as string""" + return str(v) + + +else: + + def safe_str(v): + """returns v as string, converting to utf-8 if necessary""" + try: + return str(v) + except UnicodeError: + if not isinstance(v, text_type): + v = text_type(v) + errors = "replace" + return v.encode("utf-8", errors) + + +COLLECT_FAKEMODULE_ATTRIBUTES = ( + "Collector", + "Module", + "Function", + "Instance", + "Session", + "Item", + "Class", + "File", + "_fillfuncargs", +) + + +def _setup_collect_fakemodule(): + from types import ModuleType + import pytest + + pytest.collect = ModuleType("pytest.collect") + pytest.collect.__all__ = [] # used for setns + for attribute in COLLECT_FAKEMODULE_ATTRIBUTES: + setattr(pytest.collect, attribute, getattr(pytest, attribute)) + + +if _PY2: + # Without this the test_dupfile_on_textio will fail, otherwise CaptureIO could directly inherit from StringIO. + from py.io import TextIO + + class CaptureIO(TextIO): + @property + def encoding(self): + return getattr(self, "_encoding", "UTF-8") + + +else: + import io + + class CaptureIO(io.TextIOWrapper): + def __init__(self): + super(CaptureIO, self).__init__( + io.BytesIO(), encoding="UTF-8", newline="", write_through=True + ) + + def getvalue(self): + return self.buffer.getvalue().decode("UTF-8") + + +class FuncargnamesCompatAttr(object): + """ helper class so that Metafunc, Function and FixtureRequest + don't need to each define the "funcargnames" compatibility attribute. + """ + + @property + def funcargnames(self): + """ alias attribute for ``fixturenames`` for pre-2.3 compatibility""" + return self.fixturenames + + +if six.PY2: + + def lru_cache(*_, **__): + def dec(fn): + return fn + + return dec + + +else: + from functools import lru_cache # noqa: F401 + + +if getattr(attr, "__version_info__", ()) >= (19, 2): + ATTRS_EQ_FIELD = "eq" +else: + ATTRS_EQ_FIELD = "cmp" diff --git a/venv/lib/python2.7/site-packages/_pytest/config/__init__.py b/venv/lib/python2.7/site-packages/_pytest/config/__init__.py new file mode 100644 index 0000000..0737ff9 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/config/__init__.py @@ -0,0 +1,1203 @@ +# -*- coding: utf-8 -*- +""" command line options, ini-file and conftest.py processing. 
""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import argparse +import copy +import inspect +import os +import shlex +import sys +import types +import warnings + +import attr +import py +import six +from packaging.version import Version +from pluggy import HookimplMarker +from pluggy import HookspecMarker +from pluggy import PluginManager + +import _pytest._code +import _pytest.assertion +import _pytest.hookspec # the extension point definitions +from .exceptions import PrintHelp +from .exceptions import UsageError +from .findpaths import determine_setup +from .findpaths import exists +from _pytest import deprecated +from _pytest._code import ExceptionInfo +from _pytest._code import filter_traceback +from _pytest.compat import importlib_metadata +from _pytest.compat import lru_cache +from _pytest.compat import safe_str +from _pytest.outcomes import fail +from _pytest.outcomes import Skipped +from _pytest.pathlib import Path +from _pytest.warning_types import PytestConfigWarning + +hookimpl = HookimplMarker("pytest") +hookspec = HookspecMarker("pytest") + + +class ConftestImportFailure(Exception): + def __init__(self, path, excinfo): + Exception.__init__(self, path, excinfo) + self.path = path + self.excinfo = excinfo + + +def main(args=None, plugins=None): + """ return exit code, after performing an in-process test run. + + :arg args: list of command line arguments. + + :arg plugins: list of plugin objects to be auto-registered during + initialization. + """ + from _pytest.main import EXIT_USAGEERROR + + try: + try: + config = _prepareconfig(args, plugins) + except ConftestImportFailure as e: + exc_info = ExceptionInfo(e.excinfo) + tw = py.io.TerminalWriter(sys.stderr) + tw.line( + "ImportError while loading conftest '{e.path}'.".format(e=e), red=True + ) + exc_info.traceback = exc_info.traceback.filter(filter_traceback) + exc_repr = ( + exc_info.getrepr(style="short", chain=False) + if exc_info.traceback + else exc_info.exconly() + ) + formatted_tb = safe_str(exc_repr) + for line in formatted_tb.splitlines(): + tw.line(line.rstrip(), red=True) + return 4 + else: + try: + return config.hook.pytest_cmdline_main(config=config) + finally: + config._ensure_unconfigure() + except UsageError as e: + tw = py.io.TerminalWriter(sys.stderr) + for msg in e.args: + tw.line("ERROR: {}\n".format(msg), red=True) + return EXIT_USAGEERROR + + +class cmdline(object): # compatibility namespace + main = staticmethod(main) + + +def filename_arg(path, optname): + """ Argparse type validator for filename arguments. + + :path: path of filename + :optname: name of the option + """ + if os.path.isdir(path): + raise UsageError("{} must be a filename, given: {}".format(optname, path)) + return path + + +def directory_arg(path, optname): + """Argparse type validator for directory arguments. + + :path: path of directory + :optname: name of the option + """ + if not os.path.isdir(path): + raise UsageError("{} must be a directory, given: {}".format(optname, path)) + return path + + +# Plugins that cannot be disabled via "-p no:X" currently. +essential_plugins = ( + "mark", + "main", + "runner", + "fixtures", + "helpconfig", # Provides -p. 
+)
+
+default_plugins = essential_plugins + (
+ "python",
+ "terminal",
+ "debugging",
+ "unittest",
+ "capture",
+ "skipping",
+ "tmpdir",
+ "monkeypatch",
+ "recwarn",
+ "pastebin",
+ "nose",
+ "assertion",
+ "junitxml",
+ "resultlog",
+ "doctest",
+ "cacheprovider",
+ "freeze_support",
+ "setuponly",
+ "setupplan",
+ "stepwise",
+ "warnings",
+ "logging",
+ "reports",
+)
+
+builtin_plugins = set(default_plugins)
+builtin_plugins.add("pytester")
+
+
+def get_config(args=None, plugins=None):
+ # subsequent calls to main will create a fresh instance
+ pluginmanager = PytestPluginManager()
+ config = Config(
+ pluginmanager,
+ invocation_params=Config.InvocationParams(
+ args=args, plugins=plugins, dir=Path().resolve()
+ ),
+ )
+
+ if args is not None:
+ # Handle any "-p no:plugin" args.
+ pluginmanager.consider_preparse(args)
+
+ for spec in default_plugins:
+ pluginmanager.import_plugin(spec)
+ return config
+
+
+def get_plugin_manager():
+ """
+ Obtain a new instance of the
+ :py:class:`_pytest.config.PytestPluginManager`, with default plugins
+ already loaded.
+
+ This function can be used by integration with other tools, like hooking
+ into pytest to run tests into an IDE.
+ """
+ return get_config().pluginmanager
+
+
+def _prepareconfig(args=None, plugins=None):
+ warning = None
+ if args is None:
+ args = sys.argv[1:]
+ elif isinstance(args, py.path.local):
+ args = [str(args)]
+ elif not isinstance(args, (tuple, list)):
+ msg = "`args` parameter expected to be a list or tuple of strings, got: {!r} (type: {})"
+ raise TypeError(msg.format(args, type(args)))
+
+ config = get_config(args, plugins)
+ pluginmanager = config.pluginmanager
+ try:
+ if plugins:
+ for plugin in plugins:
+ if isinstance(plugin, six.string_types):
+ pluginmanager.consider_pluginarg(plugin)
+ else:
+ pluginmanager.register(plugin)
+ if warning:
+ from _pytest.warnings import _issue_warning_captured
+
+ _issue_warning_captured(warning, hook=config.hook, stacklevel=4)
+ return pluginmanager.hook.pytest_cmdline_parse(
+ pluginmanager=pluginmanager, args=args
+ )
+ except BaseException:
+ config._ensure_unconfigure()
+ raise
+
+
+class PytestPluginManager(PluginManager):
+ """
+ Overwrites :py:class:`pluggy.PluginManager <pluggy.PluginManager>` to add pytest-specific
+ functionality:
+
+ * loading plugins from the command line, ``PYTEST_PLUGINS`` env variable and
+ ``pytest_plugins`` global variables found in plugins being loaded;
+ * ``conftest.py`` loading during start-up;
+ """
+
+ def __init__(self):
+ super(PytestPluginManager, self).__init__("pytest")
+ self._conftest_plugins = set()
+
+ # state related to local conftest plugins
+ self._dirpath2confmods = {}
+ self._conftestpath2mod = {}
+ self._confcutdir = None
+ self._noconftest = False
+ self._duplicatepaths = set()
+
+ self.add_hookspecs(_pytest.hookspec)
+ self.register(self)
+ if os.environ.get("PYTEST_DEBUG"):
+ err = sys.stderr
+ encoding = getattr(err, "encoding", "utf8")
+ try:
+ err = py.io.dupfile(err, encoding=encoding)
+ except Exception:
+ pass
+ self.trace.root.setwriter(err.write)
+ self.enable_tracing()
+
+ # Config._consider_importhook will set a real object if required.
+ self.rewrite_hook = _pytest.assertion.DummyRewriteHook()
+ # Used to know when we are importing conftests after the pytest_configure stage
+ self._configured = False
+
+ def addhooks(self, module_or_class):
+ """
+ .. deprecated:: 2.8
+
+ Use :py:meth:`pluggy.PluginManager.add_hookspecs <PluginManager.add_hookspecs>`
+ instead.
+ """ + warnings.warn(deprecated.PLUGIN_MANAGER_ADDHOOKS, stacklevel=2) + return self.add_hookspecs(module_or_class) + + def parse_hookimpl_opts(self, plugin, name): + # pytest hooks are always prefixed with pytest_ + # so we avoid accessing possibly non-readable attributes + # (see issue #1073) + if not name.startswith("pytest_"): + return + # ignore names which can not be hooks + if name == "pytest_plugins": + return + + method = getattr(plugin, name) + opts = super(PytestPluginManager, self).parse_hookimpl_opts(plugin, name) + + # consider only actual functions for hooks (#3775) + if not inspect.isroutine(method): + return + + # collect unmarked hooks as long as they have the `pytest_' prefix + if opts is None and name.startswith("pytest_"): + opts = {} + if opts is not None: + # TODO: DeprecationWarning, people should use hookimpl + # https://github.com/pytest-dev/pytest/issues/4562 + known_marks = {m.name for m in getattr(method, "pytestmark", [])} + + for name in ("tryfirst", "trylast", "optionalhook", "hookwrapper"): + opts.setdefault(name, hasattr(method, name) or name in known_marks) + return opts + + def parse_hookspec_opts(self, module_or_class, name): + opts = super(PytestPluginManager, self).parse_hookspec_opts( + module_or_class, name + ) + if opts is None: + method = getattr(module_or_class, name) + + if name.startswith("pytest_"): + # todo: deprecate hookspec hacks + # https://github.com/pytest-dev/pytest/issues/4562 + known_marks = {m.name for m in getattr(method, "pytestmark", [])} + opts = { + "firstresult": hasattr(method, "firstresult") + or "firstresult" in known_marks, + "historic": hasattr(method, "historic") + or "historic" in known_marks, + } + return opts + + def register(self, plugin, name=None): + if name in ["pytest_catchlog", "pytest_capturelog"]: + warnings.warn( + PytestConfigWarning( + "{} plugin has been merged into the core, " + "please remove it from your requirements.".format( + name.replace("_", "-") + ) + ) + ) + return + ret = super(PytestPluginManager, self).register(plugin, name) + if ret: + self.hook.pytest_plugin_registered.call_historic( + kwargs=dict(plugin=plugin, manager=self) + ) + + if isinstance(plugin, types.ModuleType): + self.consider_module(plugin) + return ret + + def getplugin(self, name): + # support deprecated naming because plugins (xdist e.g.) use it + return self.get_plugin(name) + + def hasplugin(self, name): + """Return True if the plugin with the given name is registered.""" + return bool(self.get_plugin(name)) + + def pytest_configure(self, config): + # XXX now that the pluginmanager exposes hookimpl(tryfirst...) + # we should remove tryfirst/trylast as markers + config.addinivalue_line( + "markers", + "tryfirst: mark a hook implementation function such that the " + "plugin machinery will try to call it first/as early as possible.", + ) + config.addinivalue_line( + "markers", + "trylast: mark a hook implementation function such that the " + "plugin machinery will try to call it last/as late as possible.", + ) + self._configured = True + + # + # internal API for local conftest plugin handling + # + def _set_initial_conftests(self, namespace): + """ load initial conftest files given a preparsed "namespace". + As conftest files may add their own command line options + which have arguments ('--my-opt somepath') we might get some + false positives. All builtin and 3rd party plugins will have + been loaded, however, so common options will not confuse our logic + here. 
+ """ + current = py.path.local() + self._confcutdir = ( + current.join(namespace.confcutdir, abs=True) + if namespace.confcutdir + else None + ) + self._noconftest = namespace.noconftest + self._using_pyargs = namespace.pyargs + testpaths = namespace.file_or_dir + foundanchor = False + for path in testpaths: + path = str(path) + # remove node-id syntax + i = path.find("::") + if i != -1: + path = path[:i] + anchor = current.join(path, abs=1) + if exists(anchor): # we found some file object + self._try_load_conftest(anchor) + foundanchor = True + if not foundanchor: + self._try_load_conftest(current) + + def _try_load_conftest(self, anchor): + self._getconftestmodules(anchor) + # let's also consider test* subdirs + if anchor.check(dir=1): + for x in anchor.listdir("test*"): + if x.check(dir=1): + self._getconftestmodules(x) + + @lru_cache(maxsize=128) + def _getconftestmodules(self, path): + if self._noconftest: + return [] + + if path.isfile(): + directory = path.dirpath() + else: + directory = path + + if six.PY2: # py2 is not using lru_cache. + try: + return self._dirpath2confmods[directory] + except KeyError: + pass + + # XXX these days we may rather want to use config.rootdir + # and allow users to opt into looking into the rootdir parent + # directories instead of requiring to specify confcutdir + clist = [] + for parent in directory.realpath().parts(): + if self._confcutdir and self._confcutdir.relto(parent): + continue + conftestpath = parent.join("conftest.py") + if conftestpath.isfile(): + # Use realpath to avoid loading the same conftest twice + # with build systems that create build directories containing + # symlinks to actual files. + mod = self._importconftest(conftestpath.realpath()) + clist.append(mod) + self._dirpath2confmods[directory] = clist + return clist + + def _rget_with_confmod(self, name, path): + modules = self._getconftestmodules(path) + for mod in reversed(modules): + try: + return mod, getattr(mod, name) + except AttributeError: + continue + raise KeyError(name) + + def _importconftest(self, conftestpath): + try: + return self._conftestpath2mod[conftestpath] + except KeyError: + pkgpath = conftestpath.pypkgpath() + if pkgpath is None: + _ensure_removed_sysmodule(conftestpath.purebasename) + try: + mod = conftestpath.pyimport() + if ( + hasattr(mod, "pytest_plugins") + and self._configured + and not self._using_pyargs + ): + from _pytest.deprecated import ( + PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST, + ) + + fail( + PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST.format( + conftestpath, self._confcutdir + ), + pytrace=False, + ) + except Exception: + raise ConftestImportFailure(conftestpath, sys.exc_info()) + + self._conftest_plugins.add(mod) + self._conftestpath2mod[conftestpath] = mod + dirpath = conftestpath.dirpath() + if dirpath in self._dirpath2confmods: + for path, mods in self._dirpath2confmods.items(): + if path and path.relto(dirpath) or path == dirpath: + assert mod not in mods + mods.append(mod) + self.trace("loaded conftestmodule %r" % (mod)) + self.consider_conftest(mod) + return mod + + # + # API for bootstrapping plugin loading + # + # + + def consider_preparse(self, args): + i = 0 + n = len(args) + while i < n: + opt = args[i] + i += 1 + if isinstance(opt, six.string_types): + if opt == "-p": + try: + parg = args[i] + except IndexError: + return + i += 1 + elif opt.startswith("-p"): + parg = opt[2:] + else: + continue + self.consider_pluginarg(parg) + + def consider_pluginarg(self, arg): + if arg.startswith("no:"): + name = arg[3:] + if name in 
essential_plugins: + raise UsageError("plugin %s cannot be disabled" % name) + + # PR #4304 : remove stepwise if cacheprovider is blocked + if name == "cacheprovider": + self.set_blocked("stepwise") + self.set_blocked("pytest_stepwise") + + self.set_blocked(name) + if not name.startswith("pytest_"): + self.set_blocked("pytest_" + name) + else: + name = arg + # Unblock the plugin. None indicates that it has been blocked. + # There is no interface with pluggy for this. + if self._name2plugin.get(name, -1) is None: + del self._name2plugin[name] + if not name.startswith("pytest_"): + if self._name2plugin.get("pytest_" + name, -1) is None: + del self._name2plugin["pytest_" + name] + self.import_plugin(arg, consider_entry_points=True) + + def consider_conftest(self, conftestmodule): + self.register(conftestmodule, name=conftestmodule.__file__) + + def consider_env(self): + self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS")) + + def consider_module(self, mod): + self._import_plugin_specs(getattr(mod, "pytest_plugins", [])) + + def _import_plugin_specs(self, spec): + plugins = _get_plugin_specs_as_list(spec) + for import_spec in plugins: + self.import_plugin(import_spec) + + def import_plugin(self, modname, consider_entry_points=False): + """ + Imports a plugin with ``modname``. If ``consider_entry_points`` is True, entry point + names are also considered to find a plugin. + """ + # most often modname refers to builtin modules, e.g. "pytester", + # "terminal" or "capture". Those plugins are registered under their + # basename for historic purposes but must be imported with the + # _pytest prefix. + assert isinstance(modname, six.string_types), ( + "module name as text required, got %r" % modname + ) + modname = str(modname) + if self.is_blocked(modname) or self.get_plugin(modname) is not None: + return + + importspec = "_pytest." + modname if modname in builtin_plugins else modname + self.rewrite_hook.mark_rewrite(importspec) + + if consider_entry_points: + loaded = self.load_setuptools_entrypoints("pytest11", name=modname) + if loaded: + return + + try: + __import__(importspec) + except ImportError as e: + new_exc_message = 'Error importing plugin "%s": %s' % ( + modname, + safe_str(e.args[0]), + ) + new_exc = ImportError(new_exc_message) + tb = sys.exc_info()[2] + + six.reraise(ImportError, new_exc, tb) + + except Skipped as e: + from _pytest.warnings import _issue_warning_captured + + _issue_warning_captured( + PytestConfigWarning("skipped plugin %r: %s" % (modname, e.msg)), + self.hook, + stacklevel=1, + ) + else: + mod = sys.modules[importspec] + self.register(mod, modname) + + +def _get_plugin_specs_as_list(specs): + """ + Parses a list of "plugin specs" and returns a list of plugin names. + + Plugin specs can be given as a list of strings separated by "," or already as a list/tuple in + which case it is returned as a list. Specs can also be `None` in which case an + empty list is returned. + """ + if specs is not None and not isinstance(specs, types.ModuleType): + if isinstance(specs, six.string_types): + specs = specs.split(",") if specs else [] + if not isinstance(specs, (list, tuple)): + raise UsageError( + "Plugin specs must be a ','-separated string or a " + "list/tuple of strings for plugin names. 
Given: %r" % specs
+ )
+ return list(specs)
+ return []
+
+
+def _ensure_removed_sysmodule(modname):
+ try:
+ del sys.modules[modname]
+ except KeyError:
+ pass
+
+
+class Notset(object):
+ def __repr__(self):
+ return "<NOTSET>"
+
+
+notset = Notset()
+
+
+def _iter_rewritable_modules(package_files):
+ """
+ Given an iterable of file names in a source distribution, return the "names" that should
+ be marked for assertion rewrite (for example the package "pytest_mock/__init__.py" should
+ be added as "pytest_mock" in the assertion rewrite mechanism.
+
+ This function has to deal with dist-info based distributions and egg based distributions
+ (which are still very much in use for "editable" installs).
+
+ Here are the file names as seen in a dist-info based distribution:
+
+ pytest_mock/__init__.py
+ pytest_mock/_version.py
+ pytest_mock/plugin.py
+ pytest_mock.egg-info/PKG-INFO
+
+ Here are the file names as seen in an egg based distribution:
+
+ src/pytest_mock/__init__.py
+ src/pytest_mock/_version.py
+ src/pytest_mock/plugin.py
+ src/pytest_mock.egg-info/PKG-INFO
+ LICENSE
+ setup.py
+
+ We have to take in account those two distribution flavors in order to determine which
+ names should be considered for assertion rewriting.
+
+ More information:
+ https://github.com/pytest-dev/pytest-mock/issues/167
+ """
+ package_files = list(package_files)
+ seen_some = False
+ for fn in package_files:
+ is_simple_module = "/" not in fn and fn.endswith(".py")
+ is_package = fn.count("/") == 1 and fn.endswith("__init__.py")
+ if is_simple_module:
+ module_name, _ = os.path.splitext(fn)
+ # we ignore "setup.py" at the root of the distribution
+ if module_name != "setup":
+ seen_some = True
+ yield module_name
+ elif is_package:
+ package_name = os.path.dirname(fn)
+ seen_some = True
+ yield package_name
+
+ if not seen_some:
+ # at this point we did not find any packages or modules suitable for assertion
+ # rewriting, so we try again by stripping the first path component (to account for
+ # "src" based source trees for example)
+ # this approach lets us have the common case continue to be fast, as egg-distributions
+ # are rarer
+ new_package_files = []
+ for fn in package_files:
+ parts = fn.split("/")
+ new_fn = "/".join(parts[1:])
+ if new_fn:
+ new_package_files.append(new_fn)
+ if new_package_files:
+ for _module in _iter_rewritable_modules(new_package_files):
+ yield _module
+
+
+class Config(object):
+ """
+ Access to configuration values, pluginmanager and plugin hooks.
+
+ :ivar PytestPluginManager pluginmanager: the plugin manager handles plugin registration and hook invocation.
+
+ :ivar argparse.Namespace option: access to command line option as attributes.
+
+ :ivar InvocationParams invocation_params:
+
+ Object containing the parameters regarding the ``pytest.main``
+ invocation.
+ Contains the following read-only attributes:
+ * ``args``: list of command-line arguments as passed to ``pytest.main()``.
+ * ``plugins``: list of extra plugins, might be None
+ * ``dir``: directory where ``pytest.main()`` was invoked from.
+ """
+
+ @attr.s(frozen=True)
+ class InvocationParams(object):
+ """Holds parameters passed during ``pytest.main()``
+
+ .. note::
+
+ Currently the environment variable PYTEST_ADDOPTS is also handled by
+ pytest implicitly, not being part of the invocation.
+
+ Plugins accessing ``InvocationParams`` must be aware of that.
+ """ + + args = attr.ib() + plugins = attr.ib() + dir = attr.ib() + + def __init__(self, pluginmanager, invocation_params=None, *args): + from .argparsing import Parser, FILE_OR_DIR + + if invocation_params is None: + invocation_params = self.InvocationParams( + args=(), plugins=None, dir=Path().resolve() + ) + + #: access to command line option as attributes. + #: (deprecated), use :py:func:`getoption() <_pytest.config.Config.getoption>` instead + self.option = argparse.Namespace() + + self.invocation_params = invocation_params + + _a = FILE_OR_DIR + self._parser = Parser( + usage="%%(prog)s [options] [%s] [%s] [...]" % (_a, _a), + processopt=self._processopt, + ) + #: a pluginmanager instance + self.pluginmanager = pluginmanager + self.trace = self.pluginmanager.trace.root.get("config") + self.hook = self.pluginmanager.hook + self._inicache = {} + self._override_ini = () + self._opt2dest = {} + self._cleanup = [] + self.pluginmanager.register(self, "pytestconfig") + self._configured = False + self.hook.pytest_addoption.call_historic(kwargs=dict(parser=self._parser)) + + @property + def invocation_dir(self): + """Backward compatibility""" + return py.path.local(str(self.invocation_params.dir)) + + def add_cleanup(self, func): + """ Add a function to be called when the config object gets out of + use (usually coninciding with pytest_unconfigure).""" + self._cleanup.append(func) + + def _do_configure(self): + assert not self._configured + self._configured = True + self.hook.pytest_configure.call_historic(kwargs=dict(config=self)) + + def _ensure_unconfigure(self): + if self._configured: + self._configured = False + self.hook.pytest_unconfigure(config=self) + self.hook.pytest_configure._call_history = [] + while self._cleanup: + fin = self._cleanup.pop() + fin() + + def get_terminal_writer(self): + return self.pluginmanager.get_plugin("terminalreporter")._tw + + def pytest_cmdline_parse(self, pluginmanager, args): + try: + self.parse(args) + except UsageError: + + # Handle --version and --help here in a minimal fashion. + # This gets done via helpconfig normally, but its + # pytest_cmdline_main is not called in case of errors. + if getattr(self.option, "version", False) or "--version" in args: + from _pytest.helpconfig import showversion + + showversion(self) + elif ( + getattr(self.option, "help", False) or "--help" in args or "-h" in args + ): + self._parser._getparser().print_help() + sys.stdout.write( + "\nNOTE: displaying only minimal help due to UsageError.\n\n" + ) + + raise + + return self + + def notify_exception(self, excinfo, option=None): + if option and getattr(option, "fulltrace", False): + style = "long" + else: + style = "native" + excrepr = excinfo.getrepr( + funcargs=True, showlocals=getattr(option, "showlocals", False), style=style + ) + res = self.hook.pytest_internalerror(excrepr=excrepr, excinfo=excinfo) + if not any(res): + for line in str(excrepr).split("\n"): + sys.stderr.write("INTERNALERROR> %s\n" % line) + sys.stderr.flush() + + def cwd_relative_nodeid(self, nodeid): + # nodeid's are relative to the rootpath, compute relative to cwd + if self.invocation_dir != self.rootdir: + fullpath = self.rootdir.join(nodeid) + nodeid = self.invocation_dir.bestrelpath(fullpath) + return nodeid + + @classmethod + def fromdictargs(cls, option_dict, args): + """ constructor useable for subprocesses. 
""" + config = get_config(args) + config.option.__dict__.update(option_dict) + config.parse(args, addopts=False) + for x in config.option.plugins: + config.pluginmanager.consider_pluginarg(x) + return config + + def _processopt(self, opt): + for name in opt._short_opts + opt._long_opts: + self._opt2dest[name] = opt.dest + + if hasattr(opt, "default") and opt.dest: + if not hasattr(self.option, opt.dest): + setattr(self.option, opt.dest, opt.default) + + @hookimpl(trylast=True) + def pytest_load_initial_conftests(self, early_config): + self.pluginmanager._set_initial_conftests(early_config.known_args_namespace) + + def _initini(self, args): + ns, unknown_args = self._parser.parse_known_and_unknown_args( + args, namespace=copy.copy(self.option) + ) + r = determine_setup( + ns.inifilename, + ns.file_or_dir + unknown_args, + rootdir_cmd_arg=ns.rootdir or None, + config=self, + ) + self.rootdir, self.inifile, self.inicfg = r + self._parser.extra_info["rootdir"] = self.rootdir + self._parser.extra_info["inifile"] = self.inifile + self._parser.addini("addopts", "extra command line options", "args") + self._parser.addini("minversion", "minimally required pytest version") + self._override_ini = ns.override_ini or () + + def _consider_importhook(self, args): + """Install the PEP 302 import hook if using assertion rewriting. + + Needs to parse the --assert= option from the commandline + and find all the installed plugins to mark them for rewriting + by the importhook. + """ + ns, unknown_args = self._parser.parse_known_and_unknown_args(args) + mode = getattr(ns, "assertmode", "plain") + if mode == "rewrite": + try: + hook = _pytest.assertion.install_importhook(self) + except SystemError: + mode = "plain" + else: + self._mark_plugins_for_rewrite(hook) + _warn_about_missing_assertion(mode) + + def _mark_plugins_for_rewrite(self, hook): + """ + Given an importhook, mark for rewrite any top-level + modules or packages in the distribution package for + all pytest plugins. + """ + self.pluginmanager.rewrite_hook = hook + + if os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD"): + # We don't autoload from setuptools entry points, no need to continue. + return + + package_files = ( + str(file) + for dist in importlib_metadata.distributions() + if any(ep.group == "pytest11" for ep in dist.entry_points) + for file in dist.files or [] + ) + + for name in _iter_rewritable_modules(package_files): + hook.mark_rewrite(name) + + def _validate_args(self, args, via): + """Validate known args.""" + self._parser._config_source_hint = via + try: + self._parser.parse_known_and_unknown_args( + args, namespace=copy.copy(self.option) + ) + finally: + del self._parser._config_source_hint + + return args + + def _preparse(self, args, addopts=True): + if addopts: + env_addopts = os.environ.get("PYTEST_ADDOPTS", "") + if len(env_addopts): + args[:] = ( + self._validate_args(shlex.split(env_addopts), "via PYTEST_ADDOPTS") + + args + ) + self._initini(args) + if addopts: + args[:] = ( + self._validate_args(self.getini("addopts"), "via addopts config") + args + ) + + self._checkversion() + self._consider_importhook(args) + self.pluginmanager.consider_preparse(args) + if not os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD"): + # Don't autoload from setuptools entry point. Only explicitly specified + # plugins are going to be loaded. 
+ self.pluginmanager.load_setuptools_entrypoints("pytest11") + self.pluginmanager.consider_env() + self.known_args_namespace = ns = self._parser.parse_known_args( + args, namespace=copy.copy(self.option) + ) + if self.known_args_namespace.confcutdir is None and self.inifile: + confcutdir = py.path.local(self.inifile).dirname + self.known_args_namespace.confcutdir = confcutdir + try: + self.hook.pytest_load_initial_conftests( + early_config=self, args=args, parser=self._parser + ) + except ConftestImportFailure: + e = sys.exc_info()[1] + if ns.help or ns.version: + # we don't want to prevent --help/--version to work + # so just let is pass and print a warning at the end + from _pytest.warnings import _issue_warning_captured + + _issue_warning_captured( + PytestConfigWarning( + "could not load initial conftests: {}".format(e.path) + ), + self.hook, + stacklevel=2, + ) + else: + raise + + def _checkversion(self): + import pytest + + minver = self.inicfg.get("minversion", None) + if minver: + if Version(minver) > Version(pytest.__version__): + raise pytest.UsageError( + "%s:%d: requires pytest-%s, actual pytest-%s'" + % ( + self.inicfg.config.path, + self.inicfg.lineof("minversion"), + minver, + pytest.__version__, + ) + ) + + def parse(self, args, addopts=True): + # parse given cmdline arguments into this config object. + assert not hasattr( + self, "args" + ), "can only parse cmdline args at most once per Config object" + self._origargs = args + self.hook.pytest_addhooks.call_historic( + kwargs=dict(pluginmanager=self.pluginmanager) + ) + self._preparse(args, addopts=addopts) + # XXX deprecated hook: + self.hook.pytest_cmdline_preparse(config=self, args=args) + self._parser.after_preparse = True + try: + args = self._parser.parse_setoption( + args, self.option, namespace=self.option + ) + if not args: + if self.invocation_dir == self.rootdir: + args = self.getini("testpaths") + if not args: + args = [str(self.invocation_dir)] + self.args = args + except PrintHelp: + pass + + def addinivalue_line(self, name, line): + """ add a line to an ini-file option. The option must have been + declared but might not yet be set in which case the line becomes the + the first line in its value. """ + x = self.getini(name) + assert isinstance(x, list) + x.append(line) # modifies the cached list inline + + def getini(self, name): + """ return configuration value from an :ref:`ini file `. If the + specified name hasn't been registered through a prior + :py:func:`parser.addini <_pytest.config.Parser.addini>` + call (usually from a plugin), a ValueError is raised. 
""" + try: + return self._inicache[name] + except KeyError: + self._inicache[name] = val = self._getini(name) + return val + + def _getini(self, name): + try: + description, type, default = self._parser._inidict[name] + except KeyError: + raise ValueError("unknown configuration value: %r" % (name,)) + value = self._get_override_ini_value(name) + if value is None: + try: + value = self.inicfg[name] + except KeyError: + if default is not None: + return default + if type is None: + return "" + return [] + if type == "pathlist": + dp = py.path.local(self.inicfg.config.path).dirpath() + values = [] + for relpath in shlex.split(value): + values.append(dp.join(relpath, abs=True)) + return values + elif type == "args": + return shlex.split(value) + elif type == "linelist": + return [t for t in map(lambda x: x.strip(), value.split("\n")) if t] + elif type == "bool": + return bool(_strtobool(value.strip())) + else: + assert type is None + return value + + def _getconftest_pathlist(self, name, path): + try: + mod, relroots = self.pluginmanager._rget_with_confmod(name, path) + except KeyError: + return None + modpath = py.path.local(mod.__file__).dirpath() + values = [] + for relroot in relroots: + if not isinstance(relroot, py.path.local): + relroot = relroot.replace("/", py.path.local.sep) + relroot = modpath.join(relroot, abs=True) + values.append(relroot) + return values + + def _get_override_ini_value(self, name): + value = None + # override_ini is a list of "ini=value" options + # always use the last item if multiple values are set for same ini-name, + # e.g. -o foo=bar1 -o foo=bar2 will set foo to bar2 + for ini_config in self._override_ini: + try: + key, user_ini_value = ini_config.split("=", 1) + except ValueError: + raise UsageError("-o/--override-ini expects option=value style.") + else: + if key == name: + value = user_ini_value + return value + + def getoption(self, name, default=notset, skip=False): + """ return command line option value. + + :arg name: name of the option. You may also specify + the literal ``--OPT`` option instead of the "dest" option name. + :arg default: default value if no option of that name exists. + :arg skip: if True raise pytest.skip if option does not exists + or has a None value. + """ + name = self._opt2dest.get(name, name) + try: + val = getattr(self.option, name) + if val is None and skip: + raise AttributeError(name) + return val + except AttributeError: + if default is not notset: + return default + if skip: + import pytest + + pytest.skip("no %r option found" % (name,)) + raise ValueError("no option named %r" % (name,)) + + def getvalue(self, name, path=None): + """ (deprecated, use getoption()) """ + return self.getoption(name) + + def getvalueorskip(self, name, path=None): + """ (deprecated, use getoption(skip=True)) """ + return self.getoption(name, skip=True) + + +def _assertion_supported(): + try: + assert False + except AssertionError: + return True + else: + return False + + +def _warn_about_missing_assertion(mode): + if not _assertion_supported(): + if mode == "plain": + sys.stderr.write( + "WARNING: ASSERTIONS ARE NOT EXECUTED" + " and FAILING TESTS WILL PASS. Are you" + " using python -O?" 
+ ) + else: + sys.stderr.write( + "WARNING: assertions not in test modules or" + " plugins will be ignored" + " because assert statements are not executed " + "by the underlying Python interpreter " + "(are you using python -O?)\n" + ) + + +def setns(obj, dic): + import pytest + + for name, value in dic.items(): + if isinstance(value, dict): + mod = getattr(obj, name, None) + if mod is None: + modname = "pytest.%s" % name + mod = types.ModuleType(modname) + sys.modules[modname] = mod + mod.__all__ = [] + setattr(obj, name, mod) + obj.__all__.append(name) + setns(mod, value) + else: + setattr(obj, name, value) + obj.__all__.append(name) + # if obj != pytest: + # pytest.__all__.append(name) + setattr(pytest, name, value) + + +def create_terminal_writer(config, *args, **kwargs): + """Create a TerminalWriter instance configured according to the options + in the config object. Every code which requires a TerminalWriter object + and has access to a config object should use this function. + """ + tw = py.io.TerminalWriter(*args, **kwargs) + if config.option.color == "yes": + tw.hasmarkup = True + if config.option.color == "no": + tw.hasmarkup = False + return tw + + +def _strtobool(val): + """Convert a string representation of truth to true (1) or false (0). + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. + + .. note:: copied from distutils.util + """ + val = val.lower() + if val in ("y", "yes", "t", "true", "on", "1"): + return 1 + elif val in ("n", "no", "f", "false", "off", "0"): + return 0 + else: + raise ValueError("invalid truth value %r" % (val,)) diff --git a/venv/lib/python2.7/site-packages/_pytest/config/argparsing.py b/venv/lib/python2.7/site-packages/_pytest/config/argparsing.py new file mode 100644 index 0000000..37fb772 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/config/argparsing.py @@ -0,0 +1,410 @@ +# -*- coding: utf-8 -*- +import argparse +import warnings + +import py +import six + +from _pytest.config.exceptions import UsageError + +FILE_OR_DIR = "file_or_dir" + + +class Parser(object): + """ Parser for command line arguments and ini-file values. + + :ivar extra_info: dict of generic param -> value to display in case + there's an error processing the command line arguments. + """ + + prog = None + + def __init__(self, usage=None, processopt=None): + self._anonymous = OptionGroup("custom options", parser=self) + self._groups = [] + self._processopt = processopt + self._usage = usage + self._inidict = {} + self._ininames = [] + self.extra_info = {} + + def processoption(self, option): + if self._processopt: + if option.dest: + self._processopt(option) + + def getgroup(self, name, description="", after=None): + """ get (or create) a named option Group. + + :name: name of the option group. + :description: long description for --help output. + :after: name of other group, used for ordering --help output. + + The returned group object has an ``addoption`` method with the same + signature as :py:func:`parser.addoption + <_pytest.config.Parser.addoption>` but will be shown in the + respective group in the output of ``pytest. --help``. 
+ """ + for group in self._groups: + if group.name == name: + return group + group = OptionGroup(name, description, parser=self) + i = 0 + for i, grp in enumerate(self._groups): + if grp.name == after: + break + self._groups.insert(i + 1, group) + return group + + def addoption(self, *opts, **attrs): + """ register a command line option. + + :opts: option names, can be short or long options. + :attrs: same attributes which the ``add_option()`` function of the + `argparse library + `_ + accepts. + + After command line parsing options are available on the pytest config + object via ``config.option.NAME`` where ``NAME`` is usually set + by passing a ``dest`` attribute, for example + ``addoption("--long", dest="NAME", ...)``. + """ + self._anonymous.addoption(*opts, **attrs) + + def parse(self, args, namespace=None): + from _pytest._argcomplete import try_argcomplete + + self.optparser = self._getparser() + try_argcomplete(self.optparser) + args = [str(x) if isinstance(x, py.path.local) else x for x in args] + return self.optparser.parse_args(args, namespace=namespace) + + def _getparser(self): + from _pytest._argcomplete import filescompleter + + optparser = MyOptionParser(self, self.extra_info, prog=self.prog) + groups = self._groups + [self._anonymous] + for group in groups: + if group.options: + desc = group.description or group.name + arggroup = optparser.add_argument_group(desc) + for option in group.options: + n = option.names() + a = option.attrs() + arggroup.add_argument(*n, **a) + # bash like autocompletion for dirs (appending '/') + optparser.add_argument(FILE_OR_DIR, nargs="*").completer = filescompleter + return optparser + + def parse_setoption(self, args, option, namespace=None): + parsedoption = self.parse(args, namespace=namespace) + for name, value in parsedoption.__dict__.items(): + setattr(option, name, value) + return getattr(parsedoption, FILE_OR_DIR) + + def parse_known_args(self, args, namespace=None): + """parses and returns a namespace object with known arguments at this + point. + """ + return self.parse_known_and_unknown_args(args, namespace=namespace)[0] + + def parse_known_and_unknown_args(self, args, namespace=None): + """parses and returns a namespace object with known arguments, and + the remaining arguments unknown at this point. + """ + optparser = self._getparser() + args = [str(x) if isinstance(x, py.path.local) else x for x in args] + return optparser.parse_known_args(args, namespace=namespace) + + def addini(self, name, help, type=None, default=None): + """ register an ini-file option. + + :name: name of the ini-variable + :type: type of the variable, can be ``pathlist``, ``args``, ``linelist`` + or ``bool``. + :default: default value if no ini-file option exists but is queried. + + The value of ini-variables can be retrieved via a call to + :py:func:`config.getini(name) <_pytest.config.Config.getini>`. + """ + assert type in (None, "pathlist", "args", "linelist", "bool") + self._inidict[name] = (help, type, default) + self._ininames.append(name) + + +class ArgumentError(Exception): + """ + Raised if an Argument instance is created with invalid or + inconsistent arguments. 
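The Parser above is what pytest hands to the pytest_addoption hook, so plugin and conftest code exercises addoption/addini as below (standard pytest usage; the option and ini names are illustrative):

# conftest.py
def pytest_addoption(parser):
    group = parser.getgroup("demo")  # grouped under "demo" in --help output
    group.addoption(
        "--run-slow",
        action="store_true",
        dest="run_slow",
        help="also run tests marked as slow",
    )
    parser.addini("demo_timeout", "per-test timeout used by the demo plugin", default="30")

def pytest_configure(config):
    config.getoption("run_slow")    # resolved via the dest name
    config.getini("demo_timeout")   # falls back to the registered default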
+ """ + + def __init__(self, msg, option): + self.msg = msg + self.option_id = str(option) + + def __str__(self): + if self.option_id: + return "option %s: %s" % (self.option_id, self.msg) + else: + return self.msg + + +class Argument(object): + """class that mimics the necessary behaviour of optparse.Option + + it's currently a least effort implementation + and ignoring choices and integer prefixes + https://docs.python.org/3/library/optparse.html#optparse-standard-option-types + """ + + _typ_map = {"int": int, "string": str, "float": float, "complex": complex} + + def __init__(self, *names, **attrs): + """store parms in private vars for use in add_argument""" + self._attrs = attrs + self._short_opts = [] + self._long_opts = [] + self.dest = attrs.get("dest") + if "%default" in (attrs.get("help") or ""): + warnings.warn( + 'pytest now uses argparse. "%default" should be' + ' changed to "%(default)s" ', + DeprecationWarning, + stacklevel=3, + ) + try: + typ = attrs["type"] + except KeyError: + pass + else: + # this might raise a keyerror as well, don't want to catch that + if isinstance(typ, six.string_types): + if typ == "choice": + warnings.warn( + "`type` argument to addoption() is the string %r." + " For choices this is optional and can be omitted, " + " but when supplied should be a type (for example `str` or `int`)." + " (options: %s)" % (typ, names), + DeprecationWarning, + stacklevel=4, + ) + # argparse expects a type here take it from + # the type of the first element + attrs["type"] = type(attrs["choices"][0]) + else: + warnings.warn( + "`type` argument to addoption() is the string %r, " + " but when supplied should be a type (for example `str` or `int`)." + " (options: %s)" % (typ, names), + DeprecationWarning, + stacklevel=4, + ) + attrs["type"] = Argument._typ_map[typ] + # used in test_parseopt -> test_parse_defaultgetter + self.type = attrs["type"] + else: + self.type = typ + try: + # attribute existence is tested in Config._processopt + self.default = attrs["default"] + except KeyError: + pass + self._set_opt_strings(names) + if not self.dest: + if self._long_opts: + self.dest = self._long_opts[0][2:].replace("-", "_") + else: + try: + self.dest = self._short_opts[0][1:] + except IndexError: + raise ArgumentError("need a long or short option", self) + + def names(self): + return self._short_opts + self._long_opts + + def attrs(self): + # update any attributes set by processopt + attrs = "default dest help".split() + if self.dest: + attrs.append(self.dest) + for attr in attrs: + try: + self._attrs[attr] = getattr(self, attr) + except AttributeError: + pass + if self._attrs.get("help"): + a = self._attrs["help"] + a = a.replace("%default", "%(default)s") + # a = a.replace('%prog', '%(prog)s') + self._attrs["help"] = a + return self._attrs + + def _set_opt_strings(self, opts): + """directly from optparse + + might not be necessary as this is passed to argparse later on""" + for opt in opts: + if len(opt) < 2: + raise ArgumentError( + "invalid option string %r: " + "must be at least two characters long" % opt, + self, + ) + elif len(opt) == 2: + if not (opt[0] == "-" and opt[1] != "-"): + raise ArgumentError( + "invalid short option string %r: " + "must be of the form -x, (x any non-dash char)" % opt, + self, + ) + self._short_opts.append(opt) + else: + if not (opt[0:2] == "--" and opt[2] != "-"): + raise ArgumentError( + "invalid long option string %r: " + "must start with --, followed by non-dash" % opt, + self, + ) + self._long_opts.append(opt) + + def __repr__(self): + args 
= [] + if self._short_opts: + args += ["_short_opts: " + repr(self._short_opts)] + if self._long_opts: + args += ["_long_opts: " + repr(self._long_opts)] + args += ["dest: " + repr(self.dest)] + if hasattr(self, "type"): + args += ["type: " + repr(self.type)] + if hasattr(self, "default"): + args += ["default: " + repr(self.default)] + return "Argument({})".format(", ".join(args)) + + +class OptionGroup(object): + def __init__(self, name, description="", parser=None): + self.name = name + self.description = description + self.options = [] + self.parser = parser + + def addoption(self, *optnames, **attrs): + """ add an option to this group. + + if a shortened version of a long option is specified it will + be suppressed in the help. addoption('--twowords', '--two-words') + results in help showing '--two-words' only, but --twowords gets + accepted **and** the automatic destination is in args.twowords + """ + conflict = set(optnames).intersection( + name for opt in self.options for name in opt.names() + ) + if conflict: + raise ValueError("option names %s already added" % conflict) + option = Argument(*optnames, **attrs) + self._addoption_instance(option, shortupper=False) + + def _addoption(self, *optnames, **attrs): + option = Argument(*optnames, **attrs) + self._addoption_instance(option, shortupper=True) + + def _addoption_instance(self, option, shortupper=False): + if not shortupper: + for opt in option._short_opts: + if opt[0] == "-" and opt[1].islower(): + raise ValueError("lowercase shortoptions reserved") + if self.parser: + self.parser.processoption(option) + self.options.append(option) + + +class MyOptionParser(argparse.ArgumentParser): + def __init__(self, parser, extra_info=None, prog=None): + if not extra_info: + extra_info = {} + self._parser = parser + argparse.ArgumentParser.__init__( + self, + prog=prog, + usage=parser._usage, + add_help=False, + formatter_class=DropShorterLongHelpFormatter, + ) + # extra_info is a dict of (param -> value) to display if there's + # an usage error to provide more contextual information to the user + self.extra_info = extra_info + + def error(self, message): + """Transform argparse error message into UsageError.""" + msg = "%s: error: %s" % (self.prog, message) + + if hasattr(self._parser, "_config_source_hint"): + msg = "%s (%s)" % (msg, self._parser._config_source_hint) + + raise UsageError(self.format_usage() + msg) + + def parse_args(self, args=None, namespace=None): + """allow splitting of positional arguments""" + args, argv = self.parse_known_args(args, namespace) + if argv: + for arg in argv: + if arg and arg[0] == "-": + lines = ["unrecognized arguments: %s" % (" ".join(argv))] + for k, v in sorted(self.extra_info.items()): + lines.append(" %s: %s" % (k, v)) + self.error("\n".join(lines)) + getattr(args, FILE_OR_DIR).extend(argv) + return args + + +class DropShorterLongHelpFormatter(argparse.HelpFormatter): + """shorten help for long options that differ only in extra hyphens + + - collapse **long** options that are the same except for extra hyphens + - special action attribute map_long_option allows surpressing additional + long options + - shortcut if there are only two options and one of them is a short one + - cache result on action object as this is called at least 2 times + """ + + def _format_action_invocation(self, action): + orgstr = argparse.HelpFormatter._format_action_invocation(self, action) + if orgstr and orgstr[0] != "-": # only optional arguments + return orgstr + res = getattr(action, "_formatted_action_invocation", 
None) + if res: + return res + options = orgstr.split(", ") + if len(options) == 2 and (len(options[0]) == 2 or len(options[1]) == 2): + # a shortcut for '-h, --help' or '--abc', '-a' + action._formatted_action_invocation = orgstr + return orgstr + return_list = [] + option_map = getattr(action, "map_long_option", {}) + if option_map is None: + option_map = {} + short_long = {} + for option in options: + if len(option) == 2 or option[2] == " ": + continue + if not option.startswith("--"): + raise ArgumentError( + 'long optional argument without "--": [%s]' % (option), self + ) + xxoption = option[2:] + if xxoption.split()[0] not in option_map: + shortened = xxoption.replace("-", "") + if shortened not in short_long or len(short_long[shortened]) < len( + xxoption + ): + short_long[shortened] = xxoption + # now short_long has been filled out to the longest with dashes + # **and** we keep the right option ordering from add_argument + for option in options: + if len(option) == 2 or option[2] == " ": + return_list.append(option) + if option[2:] == short_long.get(option.replace("-", "")): + return_list.append(option.replace(" ", "=", 1)) + action._formatted_action_invocation = ", ".join(return_list) + return action._formatted_action_invocation diff --git a/venv/lib/python2.7/site-packages/_pytest/config/exceptions.py b/venv/lib/python2.7/site-packages/_pytest/config/exceptions.py new file mode 100644 index 0000000..bf58fde --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/config/exceptions.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +class UsageError(Exception): + """ error in pytest usage or invocation""" + + +class PrintHelp(Exception): + """Raised when pytest should print it's help to skip the rest of the + argument parsing and validation.""" + + pass diff --git a/venv/lib/python2.7/site-packages/_pytest/config/findpaths.py b/venv/lib/python2.7/site-packages/_pytest/config/findpaths.py new file mode 100644 index 0000000..e6779b2 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/config/findpaths.py @@ -0,0 +1,153 @@ +# -*- coding: utf-8 -*- +import os + +import py + +from .exceptions import UsageError +from _pytest.outcomes import fail + + +def exists(path, ignore=EnvironmentError): + try: + return path.check() + except ignore: + return False + + +def getcfg(args, config=None): + """ + Search the list of arguments for a valid ini-file for pytest, + and return a tuple of (rootdir, inifile, cfg-dict). + + note: config is optional and used only to issue warnings explicitly (#2891). 
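getcfg below tries pytest.ini, tox.ini and setup.cfg in that order while walking upwards from each argument. A simplified pure-Python restatement of that search (it deliberately ignores the [pytest]/[tool:pytest] section checks the real function performs):

import os

INI_BASENAMES = ["pytest.ini", "tox.ini", "setup.cfg"]  # same order as getcfg()

def find_inifile(start):
    # nearest directory wins; within one directory the basename order wins
    d = os.path.abspath(start)
    while True:
        for name in INI_BASENAMES:
            candidate = os.path.join(d, name)
            if os.path.isfile(candidate):
                return candidate
        parent = os.path.dirname(d)
        if parent == d:
            return None  # reached the filesystem root without a match
        d = parent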
+ """ + from _pytest.deprecated import CFG_PYTEST_SECTION + + inibasenames = ["pytest.ini", "tox.ini", "setup.cfg"] + args = [x for x in args if not str(x).startswith("-")] + if not args: + args = [py.path.local()] + for arg in args: + arg = py.path.local(arg) + for base in arg.parts(reverse=True): + for inibasename in inibasenames: + p = base.join(inibasename) + if exists(p): + try: + iniconfig = py.iniconfig.IniConfig(p) + except py.iniconfig.ParseError as exc: + raise UsageError(str(exc)) + + if ( + inibasename == "setup.cfg" + and "tool:pytest" in iniconfig.sections + ): + return base, p, iniconfig["tool:pytest"] + elif "pytest" in iniconfig.sections: + if inibasename == "setup.cfg" and config is not None: + + fail( + CFG_PYTEST_SECTION.format(filename=inibasename), + pytrace=False, + ) + return base, p, iniconfig["pytest"] + elif inibasename == "pytest.ini": + # allowed to be empty + return base, p, {} + return None, None, None + + +def get_common_ancestor(paths): + common_ancestor = None + for path in paths: + if not path.exists(): + continue + if common_ancestor is None: + common_ancestor = path + else: + if path.relto(common_ancestor) or path == common_ancestor: + continue + elif common_ancestor.relto(path): + common_ancestor = path + else: + shared = path.common(common_ancestor) + if shared is not None: + common_ancestor = shared + if common_ancestor is None: + common_ancestor = py.path.local() + elif common_ancestor.isfile(): + common_ancestor = common_ancestor.dirpath() + return common_ancestor + + +def get_dirs_from_args(args): + def is_option(x): + return str(x).startswith("-") + + def get_file_part_from_node_id(x): + return str(x).split("::")[0] + + def get_dir_from_path(path): + if path.isdir(): + return path + return py.path.local(path.dirname) + + # These look like paths but may not exist + possible_paths = ( + py.path.local(get_file_part_from_node_id(arg)) + for arg in args + if not is_option(arg) + ) + + return [get_dir_from_path(path) for path in possible_paths if path.exists()] + + +def determine_setup(inifile, args, rootdir_cmd_arg=None, config=None): + dirs = get_dirs_from_args(args) + if inifile: + iniconfig = py.iniconfig.IniConfig(inifile) + is_cfg_file = str(inifile).endswith(".cfg") + sections = ["tool:pytest", "pytest"] if is_cfg_file else ["pytest"] + for section in sections: + try: + inicfg = iniconfig[section] + if is_cfg_file and section == "pytest" and config is not None: + from _pytest.deprecated import CFG_PYTEST_SECTION + + fail( + CFG_PYTEST_SECTION.format(filename=str(inifile)), pytrace=False + ) + break + except KeyError: + inicfg = None + if rootdir_cmd_arg is None: + rootdir = get_common_ancestor(dirs) + else: + ancestor = get_common_ancestor(dirs) + rootdir, inifile, inicfg = getcfg([ancestor], config=config) + if rootdir is None and rootdir_cmd_arg is None: + for possible_rootdir in ancestor.parts(reverse=True): + if possible_rootdir.join("setup.py").exists(): + rootdir = possible_rootdir + break + else: + if dirs != [ancestor]: + rootdir, inifile, inicfg = getcfg(dirs, config=config) + if rootdir is None: + if config is not None: + cwd = config.invocation_dir + else: + cwd = py.path.local() + rootdir = get_common_ancestor([cwd, ancestor]) + is_fs_root = os.path.splitdrive(str(rootdir))[1] == "/" + if is_fs_root: + rootdir = ancestor + if rootdir_cmd_arg: + rootdir = py.path.local(os.path.expandvars(rootdir_cmd_arg)) + if not rootdir.isdir(): + raise UsageError( + "Directory '{}' not found. 
Check your '--rootdir' option.".format( + rootdir + ) + ) + return rootdir, inifile, inicfg or {} diff --git a/venv/lib/python2.7/site-packages/_pytest/debugging.py b/venv/lib/python2.7/site-packages/_pytest/debugging.py new file mode 100644 index 0000000..99d35a5 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/debugging.py @@ -0,0 +1,334 @@ +# -*- coding: utf-8 -*- +""" interactive debugging with PDB, the Python Debugger. """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import argparse +import pdb +import sys +from doctest import UnexpectedException + +from _pytest import outcomes +from _pytest.config import hookimpl +from _pytest.config.exceptions import UsageError + + +def _validate_usepdb_cls(value): + """Validate syntax of --pdbcls option.""" + try: + modname, classname = value.split(":") + except ValueError: + raise argparse.ArgumentTypeError( + "{!r} is not in the format 'modname:classname'".format(value) + ) + return (modname, classname) + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group._addoption( + "--pdb", + dest="usepdb", + action="store_true", + help="start the interactive Python debugger on errors or KeyboardInterrupt.", + ) + group._addoption( + "--pdbcls", + dest="usepdb_cls", + metavar="modulename:classname", + type=_validate_usepdb_cls, + help="start a custom interactive Python debugger on errors. " + "For example: --pdbcls=IPython.terminal.debugger:TerminalPdb", + ) + group._addoption( + "--trace", + dest="trace", + action="store_true", + help="Immediately break when running each test.", + ) + + +def pytest_configure(config): + if config.getvalue("trace"): + config.pluginmanager.register(PdbTrace(), "pdbtrace") + if config.getvalue("usepdb"): + config.pluginmanager.register(PdbInvoke(), "pdbinvoke") + + pytestPDB._saved.append( + (pdb.set_trace, pytestPDB._pluginmanager, pytestPDB._config) + ) + pdb.set_trace = pytestPDB.set_trace + pytestPDB._pluginmanager = config.pluginmanager + pytestPDB._config = config + + # NOTE: not using pytest_unconfigure, since it might get called although + # pytest_configure was not (if another plugin raises UsageError). + def fin(): + ( + pdb.set_trace, + pytestPDB._pluginmanager, + pytestPDB._config, + ) = pytestPDB._saved.pop() + + config._cleanup.append(fin) + + +class pytestPDB(object): + """ Pseudo PDB that defers to the real pdb. """ + + _pluginmanager = None + _config = None + _saved = [] + _recursive_debug = 0 + _wrapped_pdb_cls = None + + @classmethod + def _is_capturing(cls, capman): + if capman: + return capman.is_capturing() + return False + + @classmethod + def _import_pdb_cls(cls, capman): + if not cls._config: + # Happens when using pytest.set_trace outside of a test. + return pdb.Pdb + + usepdb_cls = cls._config.getvalue("usepdb_cls") + + if cls._wrapped_pdb_cls and cls._wrapped_pdb_cls[0] == usepdb_cls: + return cls._wrapped_pdb_cls[1] + + if usepdb_cls: + modname, classname = usepdb_cls + + try: + __import__(modname) + mod = sys.modules[modname] + + # Handle --pdbcls=pdb:pdb.Pdb (useful e.g. with pdbpp). 
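_validate_usepdb_cls above just splits the option string on ":" into a (modname, classname) pair, so the flag is used exactly as its own help text suggests:

# invocation: pytest --pdb --pdbcls=IPython.terminal.debugger:TerminalPdb
modname, classname = "IPython.terminal.debugger:TerminalPdb".split(":")
assert (modname, classname) == ("IPython.terminal.debugger", "TerminalPdb")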
+ parts = classname.split(".") + pdb_cls = getattr(mod, parts[0]) + for part in parts[1:]: + pdb_cls = getattr(pdb_cls, part) + except Exception as exc: + value = ":".join((modname, classname)) + raise UsageError( + "--pdbcls: could not import {!r}: {}".format(value, exc) + ) + else: + pdb_cls = pdb.Pdb + + wrapped_cls = cls._get_pdb_wrapper_class(pdb_cls, capman) + cls._wrapped_pdb_cls = (usepdb_cls, wrapped_cls) + return wrapped_cls + + @classmethod + def _get_pdb_wrapper_class(cls, pdb_cls, capman): + import _pytest.config + + class PytestPdbWrapper(pdb_cls, object): + _pytest_capman = capman + _continued = False + + def do_debug(self, arg): + cls._recursive_debug += 1 + ret = super(PytestPdbWrapper, self).do_debug(arg) + cls._recursive_debug -= 1 + return ret + + def do_continue(self, arg): + ret = super(PytestPdbWrapper, self).do_continue(arg) + if cls._recursive_debug == 0: + tw = _pytest.config.create_terminal_writer(cls._config) + tw.line() + + capman = self._pytest_capman + capturing = pytestPDB._is_capturing(capman) + if capturing: + if capturing == "global": + tw.sep(">", "PDB continue (IO-capturing resumed)") + else: + tw.sep( + ">", + "PDB continue (IO-capturing resumed for %s)" + % capturing, + ) + capman.resume() + else: + tw.sep(">", "PDB continue") + cls._pluginmanager.hook.pytest_leave_pdb(config=cls._config, pdb=self) + self._continued = True + return ret + + do_c = do_cont = do_continue + + def do_quit(self, arg): + """Raise Exit outcome when quit command is used in pdb. + + This is a bit of a hack - it would be better if BdbQuit + could be handled, but this would require to wrap the + whole pytest run, and adjust the report etc. + """ + ret = super(PytestPdbWrapper, self).do_quit(arg) + + if cls._recursive_debug == 0: + outcomes.exit("Quitting debugger") + + return ret + + do_q = do_quit + do_exit = do_quit + + def setup(self, f, tb): + """Suspend on setup(). + + Needed after do_continue resumed, and entering another + breakpoint again. + """ + ret = super(PytestPdbWrapper, self).setup(f, tb) + if not ret and self._continued: + # pdb.setup() returns True if the command wants to exit + # from the interaction: do not suspend capturing then. + if self._pytest_capman: + self._pytest_capman.suspend_global_capture(in_=True) + return ret + + def get_stack(self, f, t): + stack, i = super(PytestPdbWrapper, self).get_stack(f, t) + if f is None: + # Find last non-hidden frame. + i = max(0, len(stack) - 1) + while i and stack[i][0].f_locals.get("__tracebackhide__", False): + i -= 1 + return stack, i + + return PytestPdbWrapper + + @classmethod + def _init_pdb(cls, method, *args, **kwargs): + """ Initialize PDB debugging, dropping any IO capturing. """ + import _pytest.config + + if cls._pluginmanager is not None: + capman = cls._pluginmanager.getplugin("capturemanager") + else: + capman = None + if capman: + capman.suspend(in_=True) + + if cls._config: + tw = _pytest.config.create_terminal_writer(cls._config) + tw.line() + + if cls._recursive_debug == 0: + # Handle header similar to pdb.set_trace in py37+. 
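PytestPdbWrapper above decorates whatever Pdb class is in use by subclassing it and overriding command methods such as do_continue and do_quit. The same pattern in isolation (a toy sketch, not pytest's wrapper; object is mixed in for the same Python 2 new-style reason as in the code above):

import pdb

class NotifyingPdb(pdb.Pdb, object):
    def do_continue(self, arg):
        print("resuming execution")  # pytest resumes IO capture at this point
        return super(NotifyingPdb, self).do_continue(arg)

    do_c = do_cont = do_continue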
+ header = kwargs.pop("header", None) + if header is not None: + tw.sep(">", header) + else: + capturing = cls._is_capturing(capman) + if capturing == "global": + tw.sep(">", "PDB %s (IO-capturing turned off)" % (method,)) + elif capturing: + tw.sep( + ">", + "PDB %s (IO-capturing turned off for %s)" + % (method, capturing), + ) + else: + tw.sep(">", "PDB %s" % (method,)) + + _pdb = cls._import_pdb_cls(capman)(**kwargs) + + if cls._pluginmanager: + cls._pluginmanager.hook.pytest_enter_pdb(config=cls._config, pdb=_pdb) + return _pdb + + @classmethod + def set_trace(cls, *args, **kwargs): + """Invoke debugging via ``Pdb.set_trace``, dropping any IO capturing.""" + frame = sys._getframe().f_back + _pdb = cls._init_pdb("set_trace", *args, **kwargs) + _pdb.set_trace(frame) + + +class PdbInvoke(object): + def pytest_exception_interact(self, node, call, report): + capman = node.config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture(in_=True) + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stdout.write(err) + _enter_pdb(node, call.excinfo, report) + + def pytest_internalerror(self, excrepr, excinfo): + tb = _postmortem_traceback(excinfo) + post_mortem(tb) + + +class PdbTrace(object): + @hookimpl(hookwrapper=True) + def pytest_pyfunc_call(self, pyfuncitem): + _test_pytest_function(pyfuncitem) + yield + + +def _test_pytest_function(pyfuncitem): + _pdb = pytestPDB._init_pdb("runcall") + testfunction = pyfuncitem.obj + pyfuncitem.obj = _pdb.runcall + if "func" in pyfuncitem._fixtureinfo.argnames: # pragma: no branch + raise ValueError("--trace can't be used with a fixture named func!") + pyfuncitem.funcargs["func"] = testfunction + new_list = list(pyfuncitem._fixtureinfo.argnames) + new_list.append("func") + pyfuncitem._fixtureinfo.argnames = tuple(new_list) + + +def _enter_pdb(node, excinfo, rep): + # XXX we re-use the TerminalReporter's terminalwriter + # because this seems to avoid some encoding related troubles + # for not completely clear reasons. + tw = node.config.pluginmanager.getplugin("terminalreporter")._tw + tw.line() + + showcapture = node.config.option.showcapture + + for sectionname, content in ( + ("stdout", rep.capstdout), + ("stderr", rep.capstderr), + ("log", rep.caplog), + ): + if showcapture in (sectionname, "all") and content: + tw.sep(">", "captured " + sectionname) + if content[-1:] == "\n": + content = content[:-1] + tw.line(content) + + tw.sep(">", "traceback") + rep.toterminal(tw) + tw.sep(">", "entering PDB") + tb = _postmortem_traceback(excinfo) + rep._pdbshown = True + post_mortem(tb) + return rep + + +def _postmortem_traceback(excinfo): + if isinstance(excinfo.value, UnexpectedException): + # A doctest.UnexpectedException is not useful for post_mortem. + # Use the underlying exception instead: + return excinfo.value.exc_info[2] + else: + return excinfo._excinfo[2] + + +def post_mortem(t): + p = pytestPDB._init_pdb("post_mortem") + p.reset() + p.interaction(None, t) + if p.quitting: + outcomes.exit("Quitting debugger") diff --git a/venv/lib/python2.7/site-packages/_pytest/deprecated.py b/venv/lib/python2.7/site-packages/_pytest/deprecated.py new file mode 100644 index 0000000..12394ac --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/deprecated.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +""" +This module contains deprecation messages and bits of code used elsewhere in the codebase +that is planned to be removed in the next pytest release. 
+ +Keeping it in a central location makes it easy to track what is deprecated and should +be removed when the time comes. + +All constants defined in this module should be either PytestWarning instances or UnformattedWarning +in case of warnings which need to format their messages. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from _pytest.warning_types import PytestDeprecationWarning +from _pytest.warning_types import RemovedInPytest4Warning +from _pytest.warning_types import UnformattedWarning + +YIELD_TESTS = "yield tests were removed in pytest 4.0 - {name} will be ignored" + + +FIXTURE_FUNCTION_CALL = ( + 'Fixture "{name}" called directly. Fixtures are not meant to be called directly,\n' + "but are created automatically when test functions request them as parameters.\n" + "See https://docs.pytest.org/en/latest/fixture.html for more information about fixtures, and\n" + "https://docs.pytest.org/en/latest/deprecations.html#calling-fixtures-directly about how to update your code." +) + +FIXTURE_NAMED_REQUEST = PytestDeprecationWarning( + "'request' is a reserved name for fixtures and will raise an error in future versions" +) + +CFG_PYTEST_SECTION = "[pytest] section in {filename} files is no longer supported, change to [tool:pytest] instead." + +GETFUNCARGVALUE = RemovedInPytest4Warning( + "getfuncargvalue is deprecated, use getfixturevalue" +) + +RAISES_MESSAGE_PARAMETER = PytestDeprecationWarning( + "The 'message' parameter is deprecated.\n" + "(did you mean to use `match='some regex'` to check the exception message?)\n" + "Please see:\n" + " https://docs.pytest.org/en/4.6-maintenance/deprecations.html#message-parameter-of-pytest-raises" +) + +RESULT_LOG = PytestDeprecationWarning( + "--result-log is deprecated and scheduled for removal in pytest 5.0.\n" + "See https://docs.pytest.org/en/latest/deprecations.html#result-log-result-log for more information." +) + +RAISES_EXEC = PytestDeprecationWarning( + "raises(..., 'code(as_a_string)') is deprecated, use the context manager form or use `exec()` directly\n\n" + "See https://docs.pytest.org/en/latest/deprecations.html#raises-warns-exec" +) +WARNS_EXEC = PytestDeprecationWarning( + "warns(..., 'code(as_a_string)') is deprecated, use the context manager form or use `exec()` directly.\n\n" + "See https://docs.pytest.org/en/latest/deprecations.html#raises-warns-exec" +) + +PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST = ( + "Defining 'pytest_plugins' in a non-top-level conftest is no longer supported " + "because it affects the entire directory tree in a non-explicit way.\n" + " {}\n" + "Please move it to a top level conftest file at the rootdir:\n" + " {}\n" + "For more information, visit:\n" + " https://docs.pytest.org/en/latest/deprecations.html#pytest-plugins-in-non-top-level-conftest-files" +) + +PYTEST_CONFIG_GLOBAL = PytestDeprecationWarning( + "the `pytest.config` global is deprecated. Please use `request.config` " + "or `pytest_configure` (if you're a pytest plugin) instead." 
+) + +PYTEST_ENSURETEMP = RemovedInPytest4Warning( + "pytest/tmpdir_factory.ensuretemp is deprecated, \n" + "please use the tmp_path fixture or tmp_path_factory.mktemp" +) + +PYTEST_LOGWARNING = PytestDeprecationWarning( + "pytest_logwarning is deprecated, no longer being called, and will be removed soon\n" + "please use pytest_warning_captured instead" +) + +PYTEST_WARNS_UNKNOWN_KWARGS = UnformattedWarning( + PytestDeprecationWarning, + "pytest.warns() got unexpected keyword arguments: {args!r}.\n" + "This will be an error in future versions.", +) + +PYTEST_PARAM_UNKNOWN_KWARGS = UnformattedWarning( + PytestDeprecationWarning, + "pytest.param() got unexpected keyword arguments: {args!r}.\n" + "This will be an error in future versions.", +) diff --git a/venv/lib/python2.7/site-packages/_pytest/doctest.py b/venv/lib/python2.7/site-packages/_pytest/doctest.py new file mode 100644 index 0000000..659d24a --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/doctest.py @@ -0,0 +1,583 @@ +# -*- coding: utf-8 -*- +""" discover and run doctests in modules and test files.""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import inspect +import platform +import sys +import traceback +import warnings +from contextlib import contextmanager + +import pytest +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import ReprFileLocation +from _pytest._code.code import TerminalRepr +from _pytest.compat import safe_getattr +from _pytest.fixtures import FixtureRequest +from _pytest.outcomes import Skipped +from _pytest.warning_types import PytestWarning + +DOCTEST_REPORT_CHOICE_NONE = "none" +DOCTEST_REPORT_CHOICE_CDIFF = "cdiff" +DOCTEST_REPORT_CHOICE_NDIFF = "ndiff" +DOCTEST_REPORT_CHOICE_UDIFF = "udiff" +DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE = "only_first_failure" + +DOCTEST_REPORT_CHOICES = ( + DOCTEST_REPORT_CHOICE_NONE, + DOCTEST_REPORT_CHOICE_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF, + DOCTEST_REPORT_CHOICE_UDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE, +) + +# Lazy definition of runner class +RUNNER_CLASS = None + + +def pytest_addoption(parser): + parser.addini( + "doctest_optionflags", + "option flags for doctests", + type="args", + default=["ELLIPSIS"], + ) + parser.addini( + "doctest_encoding", "encoding used for doctest files", default="utf-8" + ) + group = parser.getgroup("collect") + group.addoption( + "--doctest-modules", + action="store_true", + default=False, + help="run doctests in all .py modules", + dest="doctestmodules", + ) + group.addoption( + "--doctest-report", + type=str.lower, + default="udiff", + help="choose another output format for diffs on doctest failure", + choices=DOCTEST_REPORT_CHOICES, + dest="doctestreport", + ) + group.addoption( + "--doctest-glob", + action="append", + default=[], + metavar="pat", + help="doctests file matching pattern, default: test*.txt", + dest="doctestglob", + ) + group.addoption( + "--doctest-ignore-import-errors", + action="store_true", + default=False, + help="ignore doctest ImportErrors", + dest="doctest_ignore_import_errors", + ) + group.addoption( + "--doctest-continue-on-failure", + action="store_true", + default=False, + help="for a given doctest, continue to run after the first failure", + dest="doctest_continue_on_failure", + ) + + +def pytest_collect_file(path, parent): + config = parent.config + if path.ext == ".py": + if config.option.doctestmodules and not _is_setup_py(config, path, parent): + return DoctestModule(path, parent) + elif 
_is_doctest(config, path, parent): + return DoctestTextfile(path, parent) + + +def _is_setup_py(config, path, parent): + if path.basename != "setup.py": + return False + contents = path.read() + return "setuptools" in contents or "distutils" in contents + + +def _is_doctest(config, path, parent): + if path.ext in (".txt", ".rst") and parent.session.isinitpath(path): + return True + globs = config.getoption("doctestglob") or ["test*.txt"] + for glob in globs: + if path.check(fnmatch=glob): + return True + return False + + +class ReprFailDoctest(TerminalRepr): + def __init__(self, reprlocation_lines): + # List of (reprlocation, lines) tuples + self.reprlocation_lines = reprlocation_lines + + def toterminal(self, tw): + for reprlocation, lines in self.reprlocation_lines: + for line in lines: + tw.line(line) + reprlocation.toterminal(tw) + + +class MultipleDoctestFailures(Exception): + def __init__(self, failures): + super(MultipleDoctestFailures, self).__init__() + self.failures = failures + + +def _init_runner_class(): + import doctest + + class PytestDoctestRunner(doctest.DebugRunner): + """ + Runner to collect failures. Note that the out variable in this case is + a list instead of a stdout-like object + """ + + def __init__( + self, checker=None, verbose=None, optionflags=0, continue_on_failure=True + ): + doctest.DebugRunner.__init__( + self, checker=checker, verbose=verbose, optionflags=optionflags + ) + self.continue_on_failure = continue_on_failure + + def report_failure(self, out, test, example, got): + failure = doctest.DocTestFailure(test, example, got) + if self.continue_on_failure: + out.append(failure) + else: + raise failure + + def report_unexpected_exception(self, out, test, example, exc_info): + if isinstance(exc_info[1], Skipped): + raise exc_info[1] + failure = doctest.UnexpectedException(test, example, exc_info) + if self.continue_on_failure: + out.append(failure) + else: + raise failure + + return PytestDoctestRunner + + +def _get_runner(checker=None, verbose=None, optionflags=0, continue_on_failure=True): + # We need this in order to do a lazy import on doctest + global RUNNER_CLASS + if RUNNER_CLASS is None: + RUNNER_CLASS = _init_runner_class() + return RUNNER_CLASS( + checker=checker, + verbose=verbose, + optionflags=optionflags, + continue_on_failure=continue_on_failure, + ) + + +class DoctestItem(pytest.Item): + def __init__(self, name, parent, runner=None, dtest=None): + super(DoctestItem, self).__init__(name, parent) + self.runner = runner + self.dtest = dtest + self.obj = None + self.fixture_request = None + + def setup(self): + if self.dtest is not None: + self.fixture_request = _setup_fixtures(self) + globs = dict(getfixture=self.fixture_request.getfixturevalue) + for name, value in self.fixture_request.getfixturevalue( + "doctest_namespace" + ).items(): + globs[name] = value + self.dtest.globs.update(globs) + + def runtest(self): + _check_all_skipped(self.dtest) + self._disable_output_capturing_for_darwin() + failures = [] + self.runner.run(self.dtest, out=failures) + if failures: + raise MultipleDoctestFailures(failures) + + def _disable_output_capturing_for_darwin(self): + """ + Disable output capturing. 
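With the options registered earlier, doctest collection is opt-in: pytest --doctest-modules picks up docstring examples from .py files, and --doctest-glob matches text files. A module-level function that DoctestModule would collect:

def add(a, b):
    """Return the sum of two numbers.

    >>> add(2, 3)
    5
    """
    return a + b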
Otherwise, stdout is lost to doctest (#985) + """ + if platform.system() != "Darwin": + return + capman = self.config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture(in_=True) + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stderr.write(err) + + def repr_failure(self, excinfo): + import doctest + + failures = None + if excinfo.errisinstance((doctest.DocTestFailure, doctest.UnexpectedException)): + failures = [excinfo.value] + elif excinfo.errisinstance(MultipleDoctestFailures): + failures = excinfo.value.failures + + if failures is not None: + reprlocation_lines = [] + for failure in failures: + example = failure.example + test = failure.test + filename = test.filename + if test.lineno is None: + lineno = None + else: + lineno = test.lineno + example.lineno + 1 + message = type(failure).__name__ + reprlocation = ReprFileLocation(filename, lineno, message) + checker = _get_checker() + report_choice = _get_report_choice( + self.config.getoption("doctestreport") + ) + if lineno is not None: + lines = failure.test.docstring.splitlines(False) + # add line numbers to the left of the error message + lines = [ + "%03d %s" % (i + test.lineno + 1, x) + for (i, x) in enumerate(lines) + ] + # trim docstring error lines to 10 + lines = lines[max(example.lineno - 9, 0) : example.lineno + 1] + else: + lines = [ + "EXAMPLE LOCATION UNKNOWN, not showing all tests of that example" + ] + indent = ">>>" + for line in example.source.splitlines(): + lines.append("??? %s %s" % (indent, line)) + indent = "..." + if isinstance(failure, doctest.DocTestFailure): + lines += checker.output_difference( + example, failure.got, report_choice + ).split("\n") + else: + inner_excinfo = ExceptionInfo(failure.exc_info) + lines += ["UNEXPECTED EXCEPTION: %s" % repr(inner_excinfo.value)] + lines += traceback.format_exception(*failure.exc_info) + reprlocation_lines.append((reprlocation, lines)) + return ReprFailDoctest(reprlocation_lines) + else: + return super(DoctestItem, self).repr_failure(excinfo) + + def reportinfo(self): + return self.fspath, self.dtest.lineno, "[doctest] %s" % self.name + + +def _get_flag_lookup(): + import doctest + + return dict( + DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1, + DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE, + NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE, + ELLIPSIS=doctest.ELLIPSIS, + IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL, + COMPARISON_FLAGS=doctest.COMPARISON_FLAGS, + ALLOW_UNICODE=_get_allow_unicode_flag(), + ALLOW_BYTES=_get_allow_bytes_flag(), + ) + + +def get_optionflags(parent): + optionflags_str = parent.config.getini("doctest_optionflags") + flag_lookup_table = _get_flag_lookup() + flag_acc = 0 + for flag in optionflags_str: + flag_acc |= flag_lookup_table[flag] + return flag_acc + + +def _get_continue_on_failure(config): + continue_on_failure = config.getvalue("doctest_continue_on_failure") + if continue_on_failure: + # We need to turn off this if we use pdb since we should stop at + # the first failure + if config.getvalue("usepdb"): + continue_on_failure = False + return continue_on_failure + + +class DoctestTextfile(pytest.Module): + obj = None + + def collect(self): + import doctest + + # inspired by doctest.testfile; ideally we would use it directly, + # but it doesn't support passing a custom checker + encoding = self.config.getini("doctest_encoding") + text = self.fspath.read_text(encoding) + filename = str(self.fspath) + name = self.fspath.basename + globs = 
{"__name__": "__main__"} + + optionflags = get_optionflags(self) + + runner = _get_runner( + verbose=0, + optionflags=optionflags, + checker=_get_checker(), + continue_on_failure=_get_continue_on_failure(self.config), + ) + _fix_spoof_python2(runner, encoding) + + parser = doctest.DocTestParser() + test = parser.get_doctest(text, globs, name, filename, 0) + if test.examples: + yield DoctestItem(test.name, self, runner, test) + + +def _check_all_skipped(test): + """raises pytest.skip() if all examples in the given DocTest have the SKIP + option set. + """ + import doctest + + all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples) + if all_skipped: + pytest.skip("all tests skipped by +SKIP option") + + +def _is_mocked(obj): + """ + returns if a object is possibly a mock object by checking the existence of a highly improbable attribute + """ + return ( + safe_getattr(obj, "pytest_mock_example_attribute_that_shouldnt_exist", None) + is not None + ) + + +@contextmanager +def _patch_unwrap_mock_aware(): + """ + contextmanager which replaces ``inspect.unwrap`` with a version + that's aware of mock objects and doesn't recurse on them + """ + real_unwrap = getattr(inspect, "unwrap", None) + if real_unwrap is None: + yield + else: + + def _mock_aware_unwrap(obj, stop=None): + try: + if stop is None or stop is _is_mocked: + return real_unwrap(obj, stop=_is_mocked) + return real_unwrap(obj, stop=lambda obj: _is_mocked(obj) or stop(obj)) + except Exception as e: + warnings.warn( + "Got %r when unwrapping %r. This is usually caused " + "by a violation of Python's object protocol; see e.g. " + "https://github.com/pytest-dev/pytest/issues/5080" % (e, obj), + PytestWarning, + ) + raise + + inspect.unwrap = _mock_aware_unwrap + try: + yield + finally: + inspect.unwrap = real_unwrap + + +class DoctestModule(pytest.Module): + def collect(self): + import doctest + + class MockAwareDocTestFinder(doctest.DocTestFinder): + """ + a hackish doctest finder that overrides stdlib internals to fix a stdlib bug + + https://github.com/pytest-dev/pytest/issues/3456 + https://bugs.python.org/issue25532 + """ + + def _find(self, tests, obj, name, module, source_lines, globs, seen): + if _is_mocked(obj): + return + with _patch_unwrap_mock_aware(): + + doctest.DocTestFinder._find( + self, tests, obj, name, module, source_lines, globs, seen + ) + + if self.fspath.basename == "conftest.py": + module = self.config.pluginmanager._importconftest(self.fspath) + else: + try: + module = self.fspath.pyimport() + except ImportError: + if self.config.getvalue("doctest_ignore_import_errors"): + pytest.skip("unable to import module %r" % self.fspath) + else: + raise + # uses internal doctest module parsing mechanism + finder = MockAwareDocTestFinder() + optionflags = get_optionflags(self) + runner = _get_runner( + verbose=0, + optionflags=optionflags, + checker=_get_checker(), + continue_on_failure=_get_continue_on_failure(self.config), + ) + + for test in finder.find(module, module.__name__): + if test.examples: # skip empty doctests + yield DoctestItem(test.name, self, runner, test) + + +def _setup_fixtures(doctest_item): + """ + Used by DoctestTextfile and DoctestItem to setup fixture information. 
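_check_all_skipped above keeps a fully skipped doctest visible as a pytest skip instead of a silent pass; the SKIP flag is attached per example with a doctest directive:

def flaky():
    """
    >>> flaky()  # doctest: +SKIP
    'network result'
    """
    return "network result"

Because every example carries +SKIP, collecting this docstring yields a skipped item rather than a passing one.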
+ """ + + def func(): + pass + + doctest_item.funcargs = {} + fm = doctest_item.session._fixturemanager + doctest_item._fixtureinfo = fm.getfixtureinfo( + node=doctest_item, func=func, cls=None, funcargs=False + ) + fixture_request = FixtureRequest(doctest_item) + fixture_request._fillfixtures() + return fixture_request + + +def _get_checker(): + """ + Returns a doctest.OutputChecker subclass that takes in account the + ALLOW_UNICODE option to ignore u'' prefixes in strings and ALLOW_BYTES + to strip b'' prefixes. + Useful when the same doctest should run in Python 2 and Python 3. + + An inner class is used to avoid importing "doctest" at the module + level. + """ + if hasattr(_get_checker, "LiteralsOutputChecker"): + return _get_checker.LiteralsOutputChecker() + + import doctest + import re + + class LiteralsOutputChecker(doctest.OutputChecker): + """ + Copied from doctest_nose_plugin.py from the nltk project: + https://github.com/nltk/nltk + + Further extended to also support byte literals. + """ + + _unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE) + _bytes_literal_re = re.compile(r"(\W|^)[bB]([rR]?[\'\"])", re.UNICODE) + + def check_output(self, want, got, optionflags): + res = doctest.OutputChecker.check_output(self, want, got, optionflags) + if res: + return True + + allow_unicode = optionflags & _get_allow_unicode_flag() + allow_bytes = optionflags & _get_allow_bytes_flag() + if not allow_unicode and not allow_bytes: + return False + + else: # pragma: no cover + + def remove_prefixes(regex, txt): + return re.sub(regex, r"\1\2", txt) + + if allow_unicode: + want = remove_prefixes(self._unicode_literal_re, want) + got = remove_prefixes(self._unicode_literal_re, got) + if allow_bytes: + want = remove_prefixes(self._bytes_literal_re, want) + got = remove_prefixes(self._bytes_literal_re, got) + res = doctest.OutputChecker.check_output(self, want, got, optionflags) + return res + + _get_checker.LiteralsOutputChecker = LiteralsOutputChecker + return _get_checker.LiteralsOutputChecker() + + +def _get_allow_unicode_flag(): + """ + Registers and returns the ALLOW_UNICODE flag. + """ + import doctest + + return doctest.register_optionflag("ALLOW_UNICODE") + + +def _get_allow_bytes_flag(): + """ + Registers and returns the ALLOW_BYTES flag. + """ + import doctest + + return doctest.register_optionflag("ALLOW_BYTES") + + +def _get_report_choice(key): + """ + This function returns the actual `doctest` module flag value, we want to do it as late as possible to avoid + importing `doctest` and all its dependencies when parsing options, as it adds overhead and breaks tests. + """ + import doctest + + return { + DOCTEST_REPORT_CHOICE_UDIFF: doctest.REPORT_UDIFF, + DOCTEST_REPORT_CHOICE_CDIFF: doctest.REPORT_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF: doctest.REPORT_NDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE: doctest.REPORT_ONLY_FIRST_FAILURE, + DOCTEST_REPORT_CHOICE_NONE: 0, + }[key] + + +def _fix_spoof_python2(runner, encoding): + """ + Installs a "SpoofOut" into the given DebugRunner so it properly deals with unicode output. This + should patch only doctests for text files because they don't have a way to declare their + encoding. Doctests in docstrings from Python modules don't have the same problem given that + Python already decoded the strings. + + This fixes the problem related in issue #2434. 
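The ALLOW_UNICODE normalisation above is a plain regex substitution that drops u/b string prefixes from both the expected and the actual output; reproducing it with the pattern defined in LiteralsOutputChecker:

import re

_unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE)

def remove_prefixes(regex, txt):
    return re.sub(regex, r"\1\2", txt)

assert remove_prefixes(_unicode_literal_re, "u'hello'") == "'hello'"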
+ """ + from _pytest.compat import _PY2 + + if not _PY2: + return + + from doctest import _SpoofOut + + class UnicodeSpoof(_SpoofOut): + def getvalue(self): + result = _SpoofOut.getvalue(self) + if encoding and isinstance(result, bytes): + result = result.decode(encoding) + return result + + runner._fakeout = UnicodeSpoof() + + +@pytest.fixture(scope="session") +def doctest_namespace(): + """ + Fixture that returns a :py:class:`dict` that will be injected into the namespace of doctests. + """ + return dict() diff --git a/venv/lib/python2.7/site-packages/_pytest/fixtures.py b/venv/lib/python2.7/site-packages/_pytest/fixtures.py new file mode 100644 index 0000000..280a486 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/fixtures.py @@ -0,0 +1,1356 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import functools +import inspect +import itertools +import sys +import warnings +from collections import defaultdict +from collections import deque +from collections import OrderedDict + +import attr +import py +import six + +import _pytest +from _pytest import nodes +from _pytest._code.code import FormattedExcinfo +from _pytest._code.code import TerminalRepr +from _pytest.compat import _format_args +from _pytest.compat import _PytestWrapper +from _pytest.compat import exc_clear +from _pytest.compat import FuncargnamesCompatAttr +from _pytest.compat import get_real_func +from _pytest.compat import get_real_method +from _pytest.compat import getfslineno +from _pytest.compat import getfuncargnames +from _pytest.compat import getimfunc +from _pytest.compat import getlocation +from _pytest.compat import is_generator +from _pytest.compat import isclass +from _pytest.compat import NOTSET +from _pytest.compat import safe_getattr +from _pytest.deprecated import FIXTURE_FUNCTION_CALL +from _pytest.deprecated import FIXTURE_NAMED_REQUEST +from _pytest.outcomes import fail +from _pytest.outcomes import TEST_OUTCOME + + +@attr.s(frozen=True) +class PseudoFixtureDef(object): + cached_result = attr.ib() + scope = attr.ib() + + +def pytest_sessionstart(session): + import _pytest.python + import _pytest.nodes + + scopename2class.update( + { + "package": _pytest.python.Package, + "class": _pytest.python.Class, + "module": _pytest.python.Module, + "function": _pytest.nodes.Item, + "session": _pytest.main.Session, + } + ) + session._fixturemanager = FixtureManager(session) + + +scopename2class = {} + + +scope2props = dict(session=()) +scope2props["package"] = ("fspath",) +scope2props["module"] = ("fspath", "module") +scope2props["class"] = scope2props["module"] + ("cls",) +scope2props["instance"] = scope2props["class"] + ("instance",) +scope2props["function"] = scope2props["instance"] + ("function", "keywords") + + +def scopeproperty(name=None, doc=None): + def decoratescope(func): + scopename = name or func.__name__ + + def provide(self): + if func.__name__ in scope2props[self.scope]: + return func(self) + raise AttributeError( + "%s not available in %s-scoped context" % (scopename, self.scope) + ) + + return property(provide, None, None, func.__doc__) + + return decoratescope + + +def get_scope_package(node, fixturedef): + import pytest + + cls = pytest.Package + current = node + fixture_package_name = "%s/%s" % (fixturedef.baseid, "__init__.py") + while current and ( + type(current) is not cls or fixture_package_name != current.nodeid + ): + current = current.parent + if current is None: + return node.session + 
return current + + +def get_scope_node(node, scope): + cls = scopename2class.get(scope) + if cls is None: + raise ValueError("unknown scope") + return node.getparent(cls) + + +def add_funcarg_pseudo_fixture_def(collector, metafunc, fixturemanager): + # this function will transform all collected calls to a functions + # if they use direct funcargs (i.e. direct parametrization) + # because we want later test execution to be able to rely on + # an existing FixtureDef structure for all arguments. + # XXX we can probably avoid this algorithm if we modify CallSpec2 + # to directly care for creating the fixturedefs within its methods. + if not metafunc._calls[0].funcargs: + return # this function call does not have direct parametrization + # collect funcargs of all callspecs into a list of values + arg2params = {} + arg2scope = {} + for callspec in metafunc._calls: + for argname, argvalue in callspec.funcargs.items(): + assert argname not in callspec.params + callspec.params[argname] = argvalue + arg2params_list = arg2params.setdefault(argname, []) + callspec.indices[argname] = len(arg2params_list) + arg2params_list.append(argvalue) + if argname not in arg2scope: + scopenum = callspec._arg2scopenum.get(argname, scopenum_function) + arg2scope[argname] = scopes[scopenum] + callspec.funcargs.clear() + + # register artificial FixtureDef's so that later at test execution + # time we can rely on a proper FixtureDef to exist for fixture setup. + arg2fixturedefs = metafunc._arg2fixturedefs + for argname, valuelist in arg2params.items(): + # if we have a scope that is higher than function we need + # to make sure we only ever create an according fixturedef on + # a per-scope basis. We thus store and cache the fixturedef on the + # node related to the scope. + scope = arg2scope[argname] + node = None + if scope != "function": + node = get_scope_node(collector, scope) + if node is None: + assert scope == "class" and isinstance(collector, _pytest.python.Module) + # use module-level collector for class-scope (for now) + node = collector + if node and argname in node._name2pseudofixturedef: + arg2fixturedefs[argname] = [node._name2pseudofixturedef[argname]] + else: + fixturedef = FixtureDef( + fixturemanager, + "", + argname, + get_direct_param_fixture_func, + arg2scope[argname], + valuelist, + False, + False, + ) + arg2fixturedefs[argname] = [fixturedef] + if node is not None: + node._name2pseudofixturedef[argname] = fixturedef + + +def getfixturemarker(obj): + """ return fixturemarker or None if it doesn't exist or raised + exceptions.""" + try: + return getattr(obj, "_pytestfixturefunction", None) + except TEST_OUTCOME: + # some objects raise errors like request (from flask import request) + # we don't expect them to be fixture functions + return None + + +def get_parametrized_fixture_keys(item, scopenum): + """ return list of keys for all parametrized arguments which match + the specified scope. """ + assert scopenum < scopenum_function # function + try: + cs = item.callspec + except AttributeError: + pass + else: + # cs.indices.items() is random order of argnames. Need to + # sort this so that different calls to + # get_parametrized_fixture_keys will be deterministic. 
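add_funcarg_pseudo_fixture_def above is what lets direct parametrization take part in scope handling: each parametrized argument gets an artificial FixtureDef cached on the node for its scope. At the user level that machinery is driven by nothing more than this (standard pytest usage):

import pytest

# module-scoped direct parametrization: tests sharing a value are grouped
# so the pseudo-fixture is set up once per value, not once per test
@pytest.mark.parametrize("backend", ["sqlite", "postgres"], scope="module")
def test_query(backend):
    assert backend in ("sqlite", "postgres")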
+ for argname, param_index in sorted(cs.indices.items()): + if cs._arg2scopenum[argname] != scopenum: + continue + if scopenum == 0: # session + key = (argname, param_index) + elif scopenum == 1: # package + key = (argname, param_index, item.fspath.dirpath()) + elif scopenum == 2: # module + key = (argname, param_index, item.fspath) + elif scopenum == 3: # class + key = (argname, param_index, item.fspath, item.cls) + yield key + + +# algorithm for sorting on a per-parametrized resource setup basis +# it is called for scopenum==0 (session) first and performs sorting +# down to the lower scopes such as to minimize number of "high scope" +# setups and teardowns + + +def reorder_items(items): + argkeys_cache = {} + items_by_argkey = {} + for scopenum in range(0, scopenum_function): + argkeys_cache[scopenum] = d = {} + items_by_argkey[scopenum] = item_d = defaultdict(deque) + for item in items: + keys = OrderedDict.fromkeys(get_parametrized_fixture_keys(item, scopenum)) + if keys: + d[item] = keys + for key in keys: + item_d[key].append(item) + items = OrderedDict.fromkeys(items) + return list(reorder_items_atscope(items, argkeys_cache, items_by_argkey, 0)) + + +def fix_cache_order(item, argkeys_cache, items_by_argkey): + for scopenum in range(0, scopenum_function): + for key in argkeys_cache[scopenum].get(item, []): + items_by_argkey[scopenum][key].appendleft(item) + + +def reorder_items_atscope(items, argkeys_cache, items_by_argkey, scopenum): + if scopenum >= scopenum_function or len(items) < 3: + return items + ignore = set() + items_deque = deque(items) + items_done = OrderedDict() + scoped_items_by_argkey = items_by_argkey[scopenum] + scoped_argkeys_cache = argkeys_cache[scopenum] + while items_deque: + no_argkey_group = OrderedDict() + slicing_argkey = None + while items_deque: + item = items_deque.popleft() + if item in items_done or item in no_argkey_group: + continue + argkeys = OrderedDict.fromkeys( + k for k in scoped_argkeys_cache.get(item, []) if k not in ignore + ) + if not argkeys: + no_argkey_group[item] = None + else: + slicing_argkey, _ = argkeys.popitem() + # we don't have to remove relevant items from later in the deque because they'll just be ignored + matching_items = [ + i for i in scoped_items_by_argkey[slicing_argkey] if i in items + ] + for i in reversed(matching_items): + fix_cache_order(i, argkeys_cache, items_by_argkey) + items_deque.appendleft(i) + break + if no_argkey_group: + no_argkey_group = reorder_items_atscope( + no_argkey_group, argkeys_cache, items_by_argkey, scopenum + 1 + ) + for item in no_argkey_group: + items_done[item] = None + ignore.add(slicing_argkey) + return items_done + + +def fillfixtures(function): + """ fill missing funcargs for a test function. """ + try: + request = function._request + except AttributeError: + # XXX this special code path is only expected to execute + # with the oejskit plugin. It uses classes with funcargs + # and we thus have to work a bit to allow this. 
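The effect of reorder_items above is easiest to see with a parametrized session fixture: tests are regrouped per parameter value so the expensive setup runs once per value (standard pytest behaviour):

import pytest

@pytest.fixture(scope="session", params=["a", "b"])
def backend(request):
    print("\nsetup %s" % request.param)
    yield request.param
    print("\nteardown %s" % request.param)

def test_one(backend):
    pass

def test_two(backend):
    pass

# resulting order: test_one[a], test_two[a], test_one[b], test_two[b]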
+ fm = function.session._fixturemanager + fi = fm.getfixtureinfo(function.parent, function.obj, None) + function._fixtureinfo = fi + request = function._request = FixtureRequest(function) + request._fillfixtures() + # prune out funcargs for jstests + newfuncargs = {} + for name in fi.argnames: + newfuncargs[name] = function.funcargs[name] + function.funcargs = newfuncargs + else: + request._fillfixtures() + + +def get_direct_param_fixture_func(request): + return request.param + + +@attr.s(slots=True) +class FuncFixtureInfo(object): + # original function argument names + argnames = attr.ib(type=tuple) + # argnames that function immediately requires. These include argnames + + # fixture names specified via usefixtures and via autouse=True in fixture + # definitions. + initialnames = attr.ib(type=tuple) + names_closure = attr.ib() # List[str] + name2fixturedefs = attr.ib() # List[str, List[FixtureDef]] + + def prune_dependency_tree(self): + """Recompute names_closure from initialnames and name2fixturedefs + + Can only reduce names_closure, which means that the new closure will + always be a subset of the old one. The order is preserved. + + This method is needed because direct parametrization may shadow some + of the fixtures that were included in the originally built dependency + tree. In this way the dependency tree can get pruned, and the closure + of argnames may get reduced. + """ + closure = set() + working_set = set(self.initialnames) + while working_set: + argname = working_set.pop() + # argname may be smth not included in the original names_closure, + # in which case we ignore it. This currently happens with pseudo + # FixtureDefs which wrap 'get_direct_param_fixture_func(request)'. + # So they introduce the new dependency 'request' which might have + # been missing in the original tree (closure). + if argname not in closure and argname in self.names_closure: + closure.add(argname) + if argname in self.name2fixturedefs: + working_set.update(self.name2fixturedefs[argname][-1].argnames) + + self.names_closure[:] = sorted(closure, key=self.names_closure.index) + + +class FixtureRequest(FuncargnamesCompatAttr): + """ A request for a fixture from a test or fixture function. + + A request object gives access to the requesting test context + and has an optional ``param`` attribute in case + the fixture is parametrized indirectly. 
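The optional param attribute mentioned in the docstring above is populated by indirect parametrization (standard pytest usage, illustrative names):

import pytest

@pytest.fixture
def user(request):
    # request.param exists here only because of indirect=True below
    return {"name": request.param}

@pytest.mark.parametrize("user", ["alice", "bob"], indirect=True)
def test_user(user):
    assert user["name"] in ("alice", "bob")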
+ """ + + def __init__(self, pyfuncitem): + self._pyfuncitem = pyfuncitem + #: fixture for which this request is being performed + self.fixturename = None + #: Scope string, one of "function", "class", "module", "session" + self.scope = "function" + self._fixture_defs = {} # argname -> FixtureDef + fixtureinfo = pyfuncitem._fixtureinfo + self._arg2fixturedefs = fixtureinfo.name2fixturedefs.copy() + self._arg2index = {} + self._fixturemanager = pyfuncitem.session._fixturemanager + + @property + def fixturenames(self): + """names of all active fixtures in this request""" + result = list(self._pyfuncitem._fixtureinfo.names_closure) + result.extend(set(self._fixture_defs).difference(result)) + return result + + @property + def node(self): + """ underlying collection node (depends on current request scope)""" + return self._getscopeitem(self.scope) + + def _getnextfixturedef(self, argname): + fixturedefs = self._arg2fixturedefs.get(argname, None) + if fixturedefs is None: + # we arrive here because of a dynamic call to + # getfixturevalue(argname) usage which was naturally + # not known at parsing/collection time + parentid = self._pyfuncitem.parent.nodeid + fixturedefs = self._fixturemanager.getfixturedefs(argname, parentid) + self._arg2fixturedefs[argname] = fixturedefs + # fixturedefs list is immutable so we maintain a decreasing index + index = self._arg2index.get(argname, 0) - 1 + if fixturedefs is None or (-index > len(fixturedefs)): + raise FixtureLookupError(argname, self) + self._arg2index[argname] = index + return fixturedefs[index] + + @property + def config(self): + """ the pytest config object associated with this request. """ + return self._pyfuncitem.config + + @scopeproperty() + def function(self): + """ test function object if the request has a per-function scope. """ + return self._pyfuncitem.obj + + @scopeproperty("class") + def cls(self): + """ class (can be None) where the test function was collected. """ + clscol = self._pyfuncitem.getparent(_pytest.python.Class) + if clscol: + return clscol.obj + + @property + def instance(self): + """ instance (can be None) on which test function was collected. """ + # unittest support hack, see _pytest.unittest.TestCaseFunction + try: + return self._pyfuncitem._testcase + except AttributeError: + function = getattr(self, "function", None) + return getattr(function, "__self__", None) + + @scopeproperty() + def module(self): + """ python module object where the test function was collected. """ + return self._pyfuncitem.getparent(_pytest.python.Module).obj + + @scopeproperty() + def fspath(self): + """ the file system path of the test module which collected this test. """ + return self._pyfuncitem.fspath + + @property + def keywords(self): + """ keywords/markers dictionary for the underlying node. """ + return self.node.keywords + + @property + def session(self): + """ pytest session object. """ + return self._pyfuncitem.session + + def addfinalizer(self, finalizer): + """ add finalizer/teardown function to be called after the + last test within the requesting test context finished + execution. """ + # XXX usually this method is shadowed by fixturedef specific ones + self._addfinalizer(finalizer, scope=self.scope) + + def _addfinalizer(self, finalizer, scope): + colitem = self._getscopeitem(scope) + self._pyfuncitem.session._setupstate.addfinalizer( + finalizer=finalizer, colitem=colitem + ) + + def applymarker(self, marker): + """ Apply a marker to a single test function invocation. 
+ This method is useful if you don't want to have a keyword/marker + on all function invocations. + + :arg marker: a :py:class:`_pytest.mark.MarkDecorator` object + created by a call to ``pytest.mark.NAME(...)``. + """ + self.node.add_marker(marker) + + def raiseerror(self, msg): + """ raise a FixtureLookupError with the given message. """ + raise self._fixturemanager.FixtureLookupError(None, self, msg) + + def _fillfixtures(self): + item = self._pyfuncitem + fixturenames = getattr(item, "fixturenames", self.fixturenames) + for argname in fixturenames: + if argname not in item.funcargs: + item.funcargs[argname] = self.getfixturevalue(argname) + + def getfixturevalue(self, argname): + """ Dynamically run a named fixture function. + + Declaring fixtures via function argument is recommended where possible. + But if you can only decide whether to use another fixture at test + setup time, you may use this function to retrieve it inside a fixture + or test function body. + """ + return self._get_active_fixturedef(argname).cached_result[0] + + def getfuncargvalue(self, argname): + """ Deprecated, use getfixturevalue. """ + from _pytest import deprecated + + warnings.warn(deprecated.GETFUNCARGVALUE, stacklevel=2) + return self.getfixturevalue(argname) + + def _get_active_fixturedef(self, argname): + try: + return self._fixture_defs[argname] + except KeyError: + try: + fixturedef = self._getnextfixturedef(argname) + except FixtureLookupError: + if argname == "request": + cached_result = (self, [0], None) + scope = "function" + return PseudoFixtureDef(cached_result, scope) + raise + # remove indent to prevent the python3 exception + # from leaking into the call + self._compute_fixture_value(fixturedef) + self._fixture_defs[argname] = fixturedef + return fixturedef + + def _get_fixturestack(self): + current = self + values = [] + while 1: + fixturedef = getattr(current, "_fixturedef", None) + if fixturedef is None: + values.reverse() + return values + values.append(fixturedef) + current = current._parent_request + + def _compute_fixture_value(self, fixturedef): + """ + Creates a SubRequest based on "self" and calls the execute method of the given fixturedef object. This will + force the FixtureDef object to throw away any previous results and compute a new fixture value, which + will be stored into the FixtureDef object itself. 
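``getfixturevalue`` is the escape hatch described above for when the fixture to use is only known at setup time; an illustrative sketch:

    import pytest

    @pytest.fixture
    def fast_backend():
        return "fast"

    @pytest.fixture
    def slow_backend():
        return "slow"

    @pytest.fixture(params=["fast_backend", "slow_backend"])
    def backend(request):
        # dynamically resolve whichever fixture the current param names
        return request.getfixturevalue(request.param)

    def test_backend(backend):
        assert backend in ("fast", "slow")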
+ + :param FixtureDef fixturedef: + """ + # prepare a subrequest object before calling fixture function + # (latter managed by fixturedef) + argname = fixturedef.argname + funcitem = self._pyfuncitem + scope = fixturedef.scope + try: + param = funcitem.callspec.getparam(argname) + except (AttributeError, ValueError): + param = NOTSET + param_index = 0 + has_params = fixturedef.params is not None + fixtures_not_supported = getattr(funcitem, "nofuncargs", False) + if has_params and fixtures_not_supported: + msg = ( + "{name} does not support fixtures, maybe unittest.TestCase subclass?\n" + "Node id: {nodeid}\n" + "Function type: {typename}" + ).format( + name=funcitem.name, + nodeid=funcitem.nodeid, + typename=type(funcitem).__name__, + ) + fail(msg, pytrace=False) + if has_params: + frame = inspect.stack()[3] + frameinfo = inspect.getframeinfo(frame[0]) + source_path = frameinfo.filename + source_lineno = frameinfo.lineno + source_path = py.path.local(source_path) + if source_path.relto(funcitem.config.rootdir): + source_path = source_path.relto(funcitem.config.rootdir) + msg = ( + "The requested fixture has no parameter defined for test:\n" + " {}\n\n" + "Requested fixture '{}' defined in:\n{}" + "\n\nRequested here:\n{}:{}".format( + funcitem.nodeid, + fixturedef.argname, + getlocation(fixturedef.func, funcitem.config.rootdir), + source_path, + source_lineno, + ) + ) + fail(msg, pytrace=False) + else: + param_index = funcitem.callspec.indices[argname] + # if a parametrize invocation set a scope it will override + # the static scope defined with the fixture function + paramscopenum = funcitem.callspec._arg2scopenum.get(argname) + if paramscopenum is not None: + scope = scopes[paramscopenum] + + subrequest = SubRequest(self, scope, param, param_index, fixturedef) + + # check if a higher-level scoped fixture accesses a lower level one + subrequest._check_scope(argname, self.scope, scope) + + # clear sys.exc_info before invoking the fixture (python bug?) 
+ # if it's not explicitly cleared it will leak into the call
+ exc_clear()
+ try:
+ # call the fixture function
+ fixturedef.execute(request=subrequest)
+ finally:
+ self._schedule_finalizers(fixturedef, subrequest)
+
+ def _schedule_finalizers(self, fixturedef, subrequest):
+ # if fixture function failed it might have registered finalizers
+ self.session._setupstate.addfinalizer(
+ functools.partial(fixturedef.finish, request=subrequest), subrequest.node
+ )
+
+ def _check_scope(self, argname, invoking_scope, requested_scope):
+ if argname == "request":
+ return
+ if scopemismatch(invoking_scope, requested_scope):
+ # try to report something helpful
+ lines = self._factorytraceback()
+ fail(
+ "ScopeMismatch: You tried to access the %r scoped "
+ "fixture %r with a %r scoped request object, "
+ "involved factories\n%s"
+ % ((requested_scope, argname, invoking_scope, "\n".join(lines))),
+ pytrace=False,
+ )
+
+ def _factorytraceback(self):
+ lines = []
+ for fixturedef in self._get_fixturestack():
+ factory = fixturedef.func
+ fs, lineno = getfslineno(factory)
+ p = self._pyfuncitem.session.fspath.bestrelpath(fs)
+ args = _format_args(factory)
+ lines.append("%s:%d: def %s%s" % (p, lineno + 1, factory.__name__, args))
+ return lines
+
+ def _getscopeitem(self, scope):
+ if scope == "function":
+ # this might also be a non-function Item despite its attribute name
+ return self._pyfuncitem
+ if scope == "package":
+ node = get_scope_package(self._pyfuncitem, self._fixturedef)
+ else:
+ node = get_scope_node(self._pyfuncitem, scope)
+ if node is None and scope == "class":
+ # fallback to function item itself
+ node = self._pyfuncitem
+ assert node, 'Could not obtain a node for scope "{}" for function {!r}'.format(
+ scope, self._pyfuncitem
+ )
+ return node
+
+ def __repr__(self):
+ return "<FixtureRequest for %r>" % (self.node)
+
+
+class SubRequest(FixtureRequest):
+ """ a sub request for handling getting a fixture from a
+ test function/fixture. """
+
+ def __init__(self, request, scope, param, param_index, fixturedef):
+ self._parent_request = request
+ self.fixturename = fixturedef.argname
+ if param is not NOTSET:
+ self.param = param
+ self.param_index = param_index
+ self.scope = scope
+ self._fixturedef = fixturedef
+ self._pyfuncitem = request._pyfuncitem
+ self._fixture_defs = request._fixture_defs
+ self._arg2fixturedefs = request._arg2fixturedefs
+ self._arg2index = request._arg2index
+ self._fixturemanager = request._fixturemanager
+
+ def __repr__(self):
+ return "<SubRequest %r for %r>" % (self.fixturename, self._pyfuncitem)
+
+ def addfinalizer(self, finalizer):
+ self._fixturedef.addfinalizer(finalizer)
+
+ def _schedule_finalizers(self, fixturedef, subrequest):
+ # if the executing fixturedef was not explicitly requested in the argument list (via
+ # getfixturevalue inside the fixture call) then ensure this fixture def will be finished
+ # first
+ if fixturedef.argname not in self.funcargnames:
+ fixturedef.addfinalizer(
+ functools.partial(self._fixturedef.finish, request=self)
+ )
+ super(SubRequest, self)._schedule_finalizers(fixturedef, subrequest)
+
+
+scopes = "session package module class function".split()
+scopenum_function = scopes.index("function")
+
+
+def scopemismatch(currentscope, newscope):
+ return scopes.index(newscope) > scopes.index(currentscope)
+
+
+def scope2index(scope, descr, where=None):
+ """Look up the index of ``scope`` and raise a descriptive value error
+ if not defined.
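``_check_scope``/``scopemismatch`` above are what reject a higher-scoped fixture depending on a lower-scoped one; a sketch of code that trips it:

    import pytest

    @pytest.fixture
    def token():                 # function scope (the default)
        return "abc"

    @pytest.fixture(scope="session")
    def client(token):           # session scope requesting function scope
        return {"token": token}

    # Requesting `client` fails at setup with:
    #   ScopeMismatch: You tried to access the 'function' scoped fixture
    #   'token' with a 'session' scoped request object, involved factories ...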
+ """ + try: + return scopes.index(scope) + except ValueError: + fail( + "{} {}got an unexpected scope value '{}'".format( + descr, "from {} ".format(where) if where else "", scope + ), + pytrace=False, + ) + + +class FixtureLookupError(LookupError): + """ could not return a requested Fixture (missing or invalid). """ + + def __init__(self, argname, request, msg=None): + self.argname = argname + self.request = request + self.fixturestack = request._get_fixturestack() + self.msg = msg + + def formatrepr(self): + tblines = [] + addline = tblines.append + stack = [self.request._pyfuncitem.obj] + stack.extend(map(lambda x: x.func, self.fixturestack)) + msg = self.msg + if msg is not None: + # the last fixture raise an error, let's present + # it at the requesting side + stack = stack[:-1] + for function in stack: + fspath, lineno = getfslineno(function) + try: + lines, _ = inspect.getsourcelines(get_real_func(function)) + except (IOError, IndexError, TypeError): + error_msg = "file %s, line %s: source code not available" + addline(error_msg % (fspath, lineno + 1)) + else: + addline("file %s, line %s" % (fspath, lineno + 1)) + for i, line in enumerate(lines): + line = line.rstrip() + addline(" " + line) + if line.lstrip().startswith("def"): + break + + if msg is None: + fm = self.request._fixturemanager + available = set() + parentid = self.request._pyfuncitem.parent.nodeid + for name, fixturedefs in fm._arg2fixturedefs.items(): + faclist = list(fm._matchfactories(fixturedefs, parentid)) + if faclist: + available.add(name) + if self.argname in available: + msg = " recursive dependency involving fixture '{}' detected".format( + self.argname + ) + else: + msg = "fixture '{}' not found".format(self.argname) + msg += "\n available fixtures: {}".format(", ".join(sorted(available))) + msg += "\n use 'pytest --fixtures [testpath]' for help on them." 
+ + return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname) + + +class FixtureLookupErrorRepr(TerminalRepr): + def __init__(self, filename, firstlineno, tblines, errorstring, argname): + self.tblines = tblines + self.errorstring = errorstring + self.filename = filename + self.firstlineno = firstlineno + self.argname = argname + + def toterminal(self, tw): + # tw.line("FixtureLookupError: %s" %(self.argname), red=True) + for tbline in self.tblines: + tw.line(tbline.rstrip()) + lines = self.errorstring.split("\n") + if lines: + tw.line( + "{} {}".format(FormattedExcinfo.fail_marker, lines[0].strip()), + red=True, + ) + for line in lines[1:]: + tw.line( + "{} {}".format(FormattedExcinfo.flow_marker, line.strip()), + red=True, + ) + tw.line() + tw.line("%s:%d" % (self.filename, self.firstlineno + 1)) + + +def fail_fixturefunc(fixturefunc, msg): + fs, lineno = getfslineno(fixturefunc) + location = "%s:%s" % (fs, lineno + 1) + source = _pytest._code.Source(fixturefunc) + fail(msg + ":\n\n" + str(source.indent()) + "\n" + location, pytrace=False) + + +def call_fixture_func(fixturefunc, request, kwargs): + yieldctx = is_generator(fixturefunc) + if yieldctx: + it = fixturefunc(**kwargs) + res = next(it) + finalizer = functools.partial(_teardown_yield_fixture, fixturefunc, it) + request.addfinalizer(finalizer) + else: + res = fixturefunc(**kwargs) + return res + + +def _teardown_yield_fixture(fixturefunc, it): + """Executes the teardown of a fixture function by advancing the iterator after the + yield and ensure the iteration ends (if not it means there is more than one yield in the function)""" + try: + next(it) + except StopIteration: + pass + else: + fail_fixturefunc( + fixturefunc, "yield_fixture function has more than one 'yield'" + ) + + +class FixtureDef(object): + """ A container for a factory definition. """ + + def __init__( + self, + fixturemanager, + baseid, + argname, + func, + scope, + params, + unittest=False, + ids=None, + ): + self._fixturemanager = fixturemanager + self.baseid = baseid or "" + self.has_location = baseid is not None + self.func = func + self.argname = argname + self.scope = scope + self.scopenum = scope2index( + scope or "function", + descr="Fixture '{}'".format(func.__name__), + where=baseid, + ) + self.params = params + self.argnames = getfuncargnames(func, is_method=unittest) + self.unittest = unittest + self.ids = ids + self._finalizers = [] + + def addfinalizer(self, finalizer): + self._finalizers.append(finalizer) + + def finish(self, request): + exceptions = [] + try: + while self._finalizers: + try: + func = self._finalizers.pop() + func() + except: # noqa + exceptions.append(sys.exc_info()) + if exceptions: + e = exceptions[0] + # Ensure to not keep frame references through traceback. 
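``call_fixture_func`` and ``_teardown_yield_fixture`` above implement the familiar yield-fixture pattern; a sketch (``make_connection`` is a hypothetical helper):

    import pytest

    @pytest.fixture
    def connection():
        conn = make_connection()  # hypothetical setup helper
        yield conn                # value handed to the test
        conn.close()              # teardown, run via the registered finalizer

    # a second yield in the same fixture fails with:
    #   "yield_fixture function has more than one 'yield'"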
+ del exceptions
+ six.reraise(*e)
+ finally:
+ hook = self._fixturemanager.session.gethookproxy(request.node.fspath)
+ hook.pytest_fixture_post_finalizer(fixturedef=self, request=request)
+ # even if finalization fails, we invalidate
+ # the cached fixture value and remove
+ # all finalizers because they may be bound methods which will
+ # keep instances alive
+ if hasattr(self, "cached_result"):
+ del self.cached_result
+ self._finalizers = []
+
+ def execute(self, request):
+ # get required arguments and register our own finish()
+ # with their finalization
+ for argname in self.argnames:
+ fixturedef = request._get_active_fixturedef(argname)
+ if argname != "request":
+ fixturedef.addfinalizer(functools.partial(self.finish, request=request))
+
+ my_cache_key = request.param_index
+ cached_result = getattr(self, "cached_result", None)
+ if cached_result is not None:
+ result, cache_key, err = cached_result
+ if my_cache_key == cache_key:
+ if err is not None:
+ six.reraise(*err)
+ else:
+ return result
+ # we have a previous but differently parametrized fixture instance
+ # so we need to tear it down before creating a new one
+ self.finish(request)
+ assert not hasattr(self, "cached_result")
+
+ hook = self._fixturemanager.session.gethookproxy(request.node.fspath)
+ return hook.pytest_fixture_setup(fixturedef=self, request=request)
+
+ def __repr__(self):
+ return "<FixtureDef argname=%r scope=%r baseid=%r>" % (
+ self.argname,
+ self.scope,
+ self.baseid,
+ )
+
+
+def resolve_fixture_function(fixturedef, request):
+ """Gets the actual callable that can be called to obtain the fixture value, dealing with unittest-specific
+ instances and bound methods.
+ """
+ fixturefunc = fixturedef.func
+ if fixturedef.unittest:
+ if request.instance is not None:
+ # bind the unbound method to the TestCase instance
+ fixturefunc = fixturedef.func.__get__(request.instance)
+ else:
+ # the fixture function needs to be bound to the actual
+ # request.instance so that code working with "fixturedef" behaves
+ # as expected.
+ if request.instance is not None:
+ fixturefunc = getimfunc(fixturedef.func)
+ if fixturefunc != fixturedef.func:
+ fixturefunc = fixturefunc.__get__(request.instance)
+ return fixturefunc
+
+
+def pytest_fixture_setup(fixturedef, request):
+ """ Execution of fixture setup. """
+ kwargs = {}
+ for argname in fixturedef.argnames:
+ fixdef = request._get_active_fixturedef(argname)
+ result, arg_cache_key, exc = fixdef.cached_result
+ request._check_scope(argname, request.scope, fixdef.scope)
+ kwargs[argname] = result
+
+ fixturefunc = resolve_fixture_function(fixturedef, request)
+ my_cache_key = request.param_index
+ try:
+ result = call_fixture_func(fixturefunc, request, kwargs)
+ except TEST_OUTCOME:
+ fixturedef.cached_result = (None, my_cache_key, sys.exc_info())
+ raise
+ fixturedef.cached_result = (result, my_cache_key, None)
+ return result
+
+
+def _ensure_immutable_ids(ids):
+ if ids is None:
+ return
+ if callable(ids):
+ return ids
+ return tuple(ids)
+
+
+def wrap_function_to_error_out_if_called_directly(function, fixture_marker):
+ """Wrap the given fixture function so we can raise an error about it being called directly,
+ instead of used as an argument in a test function.
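The ``cached_result``/``param_index`` machinery above means a non-function-scoped fixture body runs once per parameter and is reused in between; illustratively:

    import pytest

    CALLS = []

    @pytest.fixture(scope="module", params=[1, 2])
    def resource(request):
        CALLS.append(request.param)  # runs once per param; later requests hit the cache
        return request.param

    def test_a(resource):
        assert resource in (1, 2)

    def test_b(resource):
        assert resource in (1, 2)

    # After the run CALLS == [1, 2]: test_b was served from cached_result for
    # each param, and finish() tore down param 1 before param 2 was created.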
+ """ + message = FIXTURE_FUNCTION_CALL.format( + name=fixture_marker.name or function.__name__ + ) + + @six.wraps(function) + def result(*args, **kwargs): + fail(message, pytrace=False) + + # keep reference to the original function in our own custom attribute so we don't unwrap + # further than this point and lose useful wrappings like @mock.patch (#3774) + result.__pytest_wrapped__ = _PytestWrapper(function) + + return result + + +@attr.s(frozen=True) +class FixtureFunctionMarker(object): + scope = attr.ib() + params = attr.ib(converter=attr.converters.optional(tuple)) + autouse = attr.ib(default=False) + ids = attr.ib(default=None, converter=_ensure_immutable_ids) + name = attr.ib(default=None) + + def __call__(self, function): + if isclass(function): + raise ValueError("class fixtures not supported (maybe in the future)") + + if getattr(function, "_pytestfixturefunction", False): + raise ValueError( + "fixture is being applied more than once to the same function" + ) + + function = wrap_function_to_error_out_if_called_directly(function, self) + + name = self.name or function.__name__ + if name == "request": + warnings.warn(FIXTURE_NAMED_REQUEST) + function._pytestfixturefunction = self + return function + + +def fixture(scope="function", params=None, autouse=False, ids=None, name=None): + """Decorator to mark a fixture factory function. + + This decorator can be used, with or without parameters, to define a + fixture function. + + The name of the fixture function can later be referenced to cause its + invocation ahead of running tests: test + modules or classes can use the ``pytest.mark.usefixtures(fixturename)`` + marker. + + Test functions can directly use fixture names as input + arguments in which case the fixture instance returned from the fixture + function will be injected. + + Fixtures can provide their values to test functions using ``return`` or ``yield`` + statements. When using ``yield`` the code block after the ``yield`` statement is executed + as teardown code regardless of the test outcome, and must yield exactly once. + + :arg scope: the scope for which this fixture is shared, one of + ``"function"`` (default), ``"class"``, ``"module"``, + ``"package"`` or ``"session"``. + + ``"package"`` is considered **experimental** at this time. + + :arg params: an optional list of parameters which will cause multiple + invocations of the fixture function and all of the tests + using it. + The current parameter is available in ``request.param``. + + :arg autouse: if True, the fixture func is activated for all tests that + can see it. If False (the default) then an explicit + reference is needed to activate the fixture. + + :arg ids: list of string ids each corresponding to the params + so that they are part of the test id. If no ids are provided + they will be generated automatically from the params. + + :arg name: the name of the fixture. This defaults to the name of the + decorated function. If a fixture is used in the same module in + which it is defined, the function name of the fixture will be + shadowed by the function arg that requests the fixture; one way + to resolve this is to name the decorated function + ``fixture_`` and then use + ``@pytest.fixture(name='')``. 
+ """ + if callable(scope) and params is None and autouse is False: + # direct decoration + return FixtureFunctionMarker("function", params, autouse, name=name)(scope) + if params is not None and not isinstance(params, (list, tuple)): + params = list(params) + return FixtureFunctionMarker(scope, params, autouse, ids=ids, name=name) + + +def yield_fixture(scope="function", params=None, autouse=False, ids=None, name=None): + """ (return a) decorator to mark a yield-fixture factory function. + + .. deprecated:: 3.0 + Use :py:func:`pytest.fixture` directly instead. + """ + return fixture(scope=scope, params=params, autouse=autouse, ids=ids, name=name) + + +defaultfuncargprefixmarker = fixture() + + +@fixture(scope="session") +def pytestconfig(request): + """Session-scoped fixture that returns the :class:`_pytest.config.Config` object. + + Example:: + + def test_foo(pytestconfig): + if pytestconfig.getoption("verbose") > 0: + ... + + """ + return request.config + + +def pytest_addoption(parser): + parser.addini( + "usefixtures", + type="args", + default=[], + help="list of default fixtures to be used with this project", + ) + + +class FixtureManager(object): + """ + pytest fixtures definitions and information is stored and managed + from this class. + + During collection fm.parsefactories() is called multiple times to parse + fixture function definitions into FixtureDef objects and internal + data structures. + + During collection of test functions, metafunc-mechanics instantiate + a FuncFixtureInfo object which is cached per node/func-name. + This FuncFixtureInfo object is later retrieved by Function nodes + which themselves offer a fixturenames attribute. + + The FuncFixtureInfo object holds information about fixtures and FixtureDefs + relevant for a particular function. An initial list of fixtures is + assembled like this: + + - ini-defined usefixtures + - autouse-marked fixtures along the collection chain up from the function + - usefixtures markers at module/class/function level + - test function funcargs + + Subsequently the funcfixtureinfo.fixturenames attribute is computed + as the closure of the fixtures needed to setup the initial fixtures, + i. e. fixtures needed by fixture functions themselves are appended + to the fixturenames list. + + Upon the test-setup phases all fixturenames are instantiated, retrieved + by a lookup of their FuncFixtureInfo. 
+ """ + + FixtureLookupError = FixtureLookupError + FixtureLookupErrorRepr = FixtureLookupErrorRepr + + def __init__(self, session): + self.session = session + self.config = session.config + self._arg2fixturedefs = {} + self._holderobjseen = set() + self._arg2finish = {} + self._nodeid_and_autousenames = [("", self.config.getini("usefixtures"))] + session.config.pluginmanager.register(self, "funcmanage") + + def _get_direct_parametrize_args(self, node): + """This function returns all the direct parametrization + arguments of a node, so we don't mistake them for fixtures + + Check https://github.com/pytest-dev/pytest/issues/5036 + + This things are done later as well when dealing with parametrization + so this could be improved + """ + from _pytest.mark import ParameterSet + + parametrize_argnames = [] + for marker in node.iter_markers(name="parametrize"): + if not marker.kwargs.get("indirect", False): + p_argnames, _ = ParameterSet._parse_parametrize_args( + *marker.args, **marker.kwargs + ) + parametrize_argnames.extend(p_argnames) + + return parametrize_argnames + + def getfixtureinfo(self, node, func, cls, funcargs=True): + if funcargs and not getattr(node, "nofuncargs", False): + argnames = getfuncargnames(func, cls=cls) + else: + argnames = () + + usefixtures = itertools.chain.from_iterable( + mark.args for mark in node.iter_markers(name="usefixtures") + ) + initialnames = tuple(usefixtures) + argnames + fm = node.session._fixturemanager + initialnames, names_closure, arg2fixturedefs = fm.getfixtureclosure( + initialnames, node, ignore_args=self._get_direct_parametrize_args(node) + ) + return FuncFixtureInfo(argnames, initialnames, names_closure, arg2fixturedefs) + + def pytest_plugin_registered(self, plugin): + nodeid = None + try: + p = py.path.local(plugin.__file__).realpath() + except AttributeError: + pass + else: + # construct the base nodeid which is later used to check + # what fixtures are visible for particular tests (as denoted + # by their test id) + if p.basename.startswith("conftest.py"): + nodeid = p.dirpath().relto(self.config.rootdir) + if p.sep != nodes.SEP: + nodeid = nodeid.replace(p.sep, nodes.SEP) + + self.parsefactories(plugin, nodeid) + + def _getautousenames(self, nodeid): + """ return a tuple of fixture names to be used. """ + autousenames = [] + for baseid, basenames in self._nodeid_and_autousenames: + if nodeid.startswith(baseid): + if baseid: + i = len(baseid) + nextchar = nodeid[i : i + 1] + if nextchar and nextchar not in ":/": + continue + autousenames.extend(basenames) + return autousenames + + def getfixtureclosure(self, fixturenames, parentnode, ignore_args=()): + # collect the closure of all fixtures , starting with the given + # fixturenames as the initial set. As we have to visit all + # factory definitions anyway, we also return an arg2fixturedefs + # mapping so that the caller can reuse it and does not have + # to re-discover fixturedefs again for each fixturename + # (discovering matching fixtures for a given name/node is expensive) + + parentid = parentnode.nodeid + fixturenames_closure = self._getautousenames(parentid) + + def merge(otherlist): + for arg in otherlist: + if arg not in fixturenames_closure: + fixturenames_closure.append(arg) + + merge(fixturenames) + + # at this point, fixturenames_closure contains what we call "initialnames", + # which is a set of fixturenames the function immediately requests. We + # need to return it as well, so save this. 
+ initialnames = tuple(fixturenames_closure) + + arg2fixturedefs = {} + lastlen = -1 + while lastlen != len(fixturenames_closure): + lastlen = len(fixturenames_closure) + for argname in fixturenames_closure: + if argname in ignore_args: + continue + if argname in arg2fixturedefs: + continue + fixturedefs = self.getfixturedefs(argname, parentid) + if fixturedefs: + arg2fixturedefs[argname] = fixturedefs + merge(fixturedefs[-1].argnames) + + def sort_by_scope(arg_name): + try: + fixturedefs = arg2fixturedefs[arg_name] + except KeyError: + return scopes.index("function") + else: + return fixturedefs[-1].scopenum + + fixturenames_closure.sort(key=sort_by_scope) + return initialnames, fixturenames_closure, arg2fixturedefs + + def pytest_generate_tests(self, metafunc): + for argname in metafunc.fixturenames: + faclist = metafunc._arg2fixturedefs.get(argname) + if faclist: + fixturedef = faclist[-1] + if fixturedef.params is not None: + markers = list(metafunc.definition.iter_markers("parametrize")) + for parametrize_mark in markers: + if "argnames" in parametrize_mark.kwargs: + argnames = parametrize_mark.kwargs["argnames"] + else: + argnames = parametrize_mark.args[0] + + if not isinstance(argnames, (tuple, list)): + argnames = [ + x.strip() for x in argnames.split(",") if x.strip() + ] + if argname in argnames: + break + else: + metafunc.parametrize( + argname, + fixturedef.params, + indirect=True, + scope=fixturedef.scope, + ids=fixturedef.ids, + ) + else: + continue # will raise FixtureLookupError at setup time + + def pytest_collection_modifyitems(self, items): + # separate parametrized setups + items[:] = reorder_items(items) + + def parsefactories(self, node_or_obj, nodeid=NOTSET, unittest=False): + if nodeid is not NOTSET: + holderobj = node_or_obj + else: + holderobj = node_or_obj.obj + nodeid = node_or_obj.nodeid + if holderobj in self._holderobjseen: + return + + self._holderobjseen.add(holderobj) + autousenames = [] + for name in dir(holderobj): + # The attribute can be an arbitrary descriptor, so the attribute + # access below can raise. safe_getatt() ignores such exceptions. + obj = safe_getattr(holderobj, name, None) + marker = getfixturemarker(obj) + if not isinstance(marker, FixtureFunctionMarker): + # magic globals with __getattr__ might have got us a wrong + # fixture attribute + continue + + if marker.name: + name = marker.name + + # during fixture definition we wrap the original fixture function + # to issue a warning if called directly, so here we unwrap it in order to not emit the warning + # when pytest itself calls the fixture function + if six.PY2 and unittest: + # hack on Python 2 because of the unbound methods + obj = get_real_func(obj) + else: + obj = get_real_method(obj, holderobj) + + fixture_def = FixtureDef( + self, + nodeid, + name, + obj, + marker.scope, + marker.params, + unittest=unittest, + ids=marker.ids, + ) + + faclist = self._arg2fixturedefs.setdefault(name, []) + if fixture_def.has_location: + faclist.append(fixture_def) + else: + # fixturedefs with no location are at the front + # so this inserts the current fixturedef after the + # existing fixturedefs from external plugins but + # before the fixturedefs provided in conftests. 
+ i = len([f for f in faclist if not f.has_location]) + faclist.insert(i, fixture_def) + if marker.autouse: + autousenames.append(name) + + if autousenames: + self._nodeid_and_autousenames.append((nodeid or "", autousenames)) + + def getfixturedefs(self, argname, nodeid): + """ + Gets a list of fixtures which are applicable to the given node id. + + :param str argname: name of the fixture to search for + :param str nodeid: full node id of the requesting test. + :return: list[FixtureDef] + """ + try: + fixturedefs = self._arg2fixturedefs[argname] + except KeyError: + return None + return tuple(self._matchfactories(fixturedefs, nodeid)) + + def _matchfactories(self, fixturedefs, nodeid): + for fixturedef in fixturedefs: + if nodes.ischildnode(fixturedef.baseid, nodeid): + yield fixturedef diff --git a/venv/lib/python2.7/site-packages/_pytest/freeze_support.py b/venv/lib/python2.7/site-packages/_pytest/freeze_support.py new file mode 100644 index 0000000..aeeec2a --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/freeze_support.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +""" +Provides a function to report all internal modules for using freezing tools +pytest +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + + +def freeze_includes(): + """ + Returns a list of module names used by pytest that should be + included by cx_freeze. + """ + import py + import _pytest + + result = list(_iter_all_modules(py)) + result += list(_iter_all_modules(_pytest)) + return result + + +def _iter_all_modules(package, prefix=""): + """ + Iterates over the names of all modules that can be found in the given + package, recursively. + Example: + _iter_all_modules(_pytest) -> + ['_pytest.assertion.newinterpret', + '_pytest.capture', + '_pytest.core', + ... + ] + """ + import os + import pkgutil + + if type(package) is not str: + path, prefix = package.__path__[0], package.__name__ + "." + else: + path = package + for _, name, is_package in pkgutil.iter_modules([path]): + if is_package: + for m in _iter_all_modules(os.path.join(path, name), prefix=name + "."): + yield prefix + m + else: + yield prefix + name diff --git a/venv/lib/python2.7/site-packages/_pytest/helpconfig.py b/venv/lib/python2.7/site-packages/_pytest/helpconfig.py new file mode 100644 index 0000000..5681160 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/helpconfig.py @@ -0,0 +1,247 @@ +# -*- coding: utf-8 -*- +""" version info, help messages, tracing configuration. """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import sys +from argparse import Action + +import py + +import pytest +from _pytest.config import PrintHelp + + +class HelpAction(Action): + """This is an argparse Action that will raise an exception in + order to skip the rest of the argument parsing when --help is passed. + This prevents argparse from quitting due to missing required arguments + when any are defined, for example by ``pytest_addoption``. + This is similar to the way that the builtin argparse --help option is + implemented by raising SystemExit. 
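Looping back to ``freeze_includes()`` in freeze_support.py above: a hypothetical cx_Freeze build script would consume it roughly like this:

    # setup.py -- hypothetical cx_Freeze build script
    from cx_Freeze import setup, Executable

    import pytest

    setup(
        name="frozen-tests",
        executables=[Executable("run_tests.py")],
        options={"build_exe": {"includes": pytest.freeze_includes()}},
    )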
+ """ + + def __init__(self, option_strings, dest=None, default=False, help=None): + super(HelpAction, self).__init__( + option_strings=option_strings, + dest=dest, + const=True, + default=default, + nargs=0, + help=help, + ) + + def __call__(self, parser, namespace, values, option_string=None): + setattr(namespace, self.dest, self.const) + + # We should only skip the rest of the parsing after preparse is done + if getattr(parser._parser, "after_preparse", False): + raise PrintHelp + + +def pytest_addoption(parser): + group = parser.getgroup("debugconfig") + group.addoption( + "--version", + action="store_true", + help="display pytest lib version and import information.", + ) + group._addoption( + "-h", + "--help", + action=HelpAction, + dest="help", + help="show help message and configuration info", + ) + group._addoption( + "-p", + action="append", + dest="plugins", + default=[], + metavar="name", + help="early-load given plugin module name or entry point (multi-allowed). " + "To avoid loading of plugins, use the `no:` prefix, e.g. " + "`no:doctest`.", + ) + group.addoption( + "--traceconfig", + "--trace-config", + action="store_true", + default=False, + help="trace considerations of conftest.py files.", + ), + group.addoption( + "--debug", + action="store_true", + dest="debug", + default=False, + help="store internal tracing debug information in 'pytestdebug.log'.", + ) + group._addoption( + "-o", + "--override-ini", + dest="override_ini", + action="append", + help='override ini option with "option=value" style, e.g. `-o xfail_strict=True -o cache_dir=cache`.', + ) + + +@pytest.hookimpl(hookwrapper=True) +def pytest_cmdline_parse(): + outcome = yield + config = outcome.get_result() + if config.option.debug: + path = os.path.abspath("pytestdebug.log") + debugfile = open(path, "w") + debugfile.write( + "versions pytest-%s, py-%s, " + "python-%s\ncwd=%s\nargs=%s\n\n" + % ( + pytest.__version__, + py.__version__, + ".".join(map(str, sys.version_info)), + os.getcwd(), + config._origargs, + ) + ) + config.trace.root.setwriter(debugfile.write) + undo_tracing = config.pluginmanager.enable_tracing() + sys.stderr.write("writing pytestdebug information to %s\n" % path) + + def unset_tracing(): + debugfile.close() + sys.stderr.write("wrote pytestdebug information to %s\n" % debugfile.name) + config.trace.root.setwriter(None) + undo_tracing() + + config.add_cleanup(unset_tracing) + + +def showversion(config): + p = py.path.local(pytest.__file__) + sys.stderr.write( + "This is pytest version %s, imported from %s\n" % (pytest.__version__, p) + ) + plugininfo = getpluginversioninfo(config) + if plugininfo: + for line in plugininfo: + sys.stderr.write(line + "\n") + + +def pytest_cmdline_main(config): + if config.option.version: + showversion(config) + return 0 + elif config.option.help: + config._do_configure() + showhelp(config) + config._ensure_unconfigure() + return 0 + + +def showhelp(config): + import textwrap + + reporter = config.pluginmanager.get_plugin("terminalreporter") + tw = reporter._tw + tw.write(config._parser.optparser.format_help()) + tw.line() + tw.line( + "[pytest] ini-options in the first pytest.ini|tox.ini|setup.cfg file found:" + ) + tw.line() + + columns = tw.fullwidth # costly call + indent_len = 24 # based on argparse's max_help_position=24 + indent = " " * indent_len + for name in config._parser._ininames: + help, type, default = config._parser._inidict[name] + if type is None: + type = "string" + spec = "%s (%s):" % (name, type) + tw.write(" %s" % spec) + spec_len = 
len(spec)
+ if spec_len > (indent_len - 3):
+ # Display help starting at a new line.
+ tw.line()
+ helplines = textwrap.wrap(
+ help,
+ columns,
+ initial_indent=indent,
+ subsequent_indent=indent,
+ break_on_hyphens=False,
+ )
+
+ for line in helplines:
+ tw.line(line)
+ else:
+ # Display help starting after the spec, following lines indented.
+ tw.write(" " * (indent_len - spec_len - 2))
+ wrapped = textwrap.wrap(help, columns - indent_len, break_on_hyphens=False)
+
+ tw.line(wrapped[0])
+ for line in wrapped[1:]:
+ tw.line(indent + line)
+
+ tw.line()
+ tw.line("environment variables:")
+ vars = [
+ ("PYTEST_ADDOPTS", "extra command line options"),
+ ("PYTEST_PLUGINS", "comma-separated plugins to load during startup"),
+ ("PYTEST_DISABLE_PLUGIN_AUTOLOAD", "set to disable plugin auto-loading"),
+ ("PYTEST_DEBUG", "set to enable debug tracing of pytest's internals"),
+ ]
+ for name, help in vars:
+ tw.line(" %-24s %s" % (name, help))
+ tw.line()
+ tw.line()
+
+ tw.line("to see available markers type: pytest --markers")
+ tw.line("to see available fixtures type: pytest --fixtures")
+ tw.line(
+ "(shown according to specified file_or_dir or current dir "
+ "if not specified; fixtures with leading '_' are only shown "
+ "with the '-v' option)"
+ )
+
+ for warningreport in reporter.stats.get("warnings", []):
+ tw.line("warning : " + warningreport.message, red=True)
+ return
+
+
+conftest_options = [("pytest_plugins", "list of plugin names to load")]
+
+
+def getpluginversioninfo(config):
+ lines = []
+ plugininfo = config.pluginmanager.list_plugin_distinfo()
+ if plugininfo:
+ lines.append("setuptools registered plugins:")
+ for plugin, dist in plugininfo:
+ loc = getattr(plugin, "__file__", repr(plugin))
+ content = "%s-%s at %s" % (dist.project_name, dist.version, loc)
+ lines.append(" " + content)
+ return lines
+
+
+def pytest_report_header(config):
+ lines = []
+ if config.option.debug or config.option.traceconfig:
+ lines.append("using: pytest-%s pylib-%s" % (pytest.__version__, py.__version__))
+
+ verinfo = getpluginversioninfo(config)
+ if verinfo:
+ lines.extend(verinfo)
+
+ if config.option.traceconfig:
+ lines.append("active plugins:")
+ items = config.pluginmanager.list_name_plugin()
+ for name, plugin in items:
+ if hasattr(plugin, "__file__"):
+ r = plugin.__file__
+ else:
+ r = repr(plugin)
+ lines.append(" %-20s: %s" % (name, r))
+ return lines
diff --git a/venv/lib/python2.7/site-packages/_pytest/hookspec.py b/venv/lib/python2.7/site-packages/_pytest/hookspec.py
new file mode 100644
index 0000000..7ab6154
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/_pytest/hookspec.py
@@ -0,0 +1,639 @@
+# -*- coding: utf-8 -*-
+""" hook specifications for pytest plugins, invoked from main.py and builtin plugins. """
+from pluggy import HookspecMarker
+
+from _pytest.deprecated import PYTEST_LOGWARNING
+
+hookspec = HookspecMarker("pytest")
+
+# -------------------------------------------------------------------------
+# Initialization hooks called for every plugin
+# -------------------------------------------------------------------------
+
+
+@hookspec(historic=True)
+def pytest_addhooks(pluginmanager):
+ """called at plugin registration time to allow adding new hooks via a call to
+ ``pluginmanager.add_hookspecs(module_or_class, prefix)``.
+
+
+ :param _pytest.config.PytestPluginManager pluginmanager: pytest plugin manager
+
+ .. note::
+ This hook is incompatible with ``hookwrapper=True``.
+ """ + + +@hookspec(historic=True) +def pytest_plugin_registered(plugin, manager): + """ a new pytest plugin got registered. + + :param plugin: the plugin module or instance + :param _pytest.config.PytestPluginManager manager: pytest plugin manager + + .. note:: + This hook is incompatible with ``hookwrapper=True``. + """ + + +@hookspec(historic=True) +def pytest_addoption(parser): + """register argparse-style options and ini-style config values, + called once at the beginning of a test run. + + .. note:: + + This function should be implemented only in plugins or ``conftest.py`` + files situated at the tests root directory due to how pytest + :ref:`discovers plugins during startup `. + + :arg _pytest.config.Parser parser: To add command line options, call + :py:func:`parser.addoption(...) <_pytest.config.Parser.addoption>`. + To add ini-file values call :py:func:`parser.addini(...) + <_pytest.config.Parser.addini>`. + + Options can later be accessed through the + :py:class:`config <_pytest.config.Config>` object, respectively: + + - :py:func:`config.getoption(name) <_pytest.config.Config.getoption>` to + retrieve the value of a command line option. + + - :py:func:`config.getini(name) <_pytest.config.Config.getini>` to retrieve + a value read from an ini-style file. + + The config object is passed around on many internal objects via the ``.config`` + attribute or can be retrieved as the ``pytestconfig`` fixture. + + .. note:: + This hook is incompatible with ``hookwrapper=True``. + """ + + +@hookspec(historic=True) +def pytest_configure(config): + """ + Allows plugins and conftest files to perform initial configuration. + + This hook is called for every plugin and initial conftest file + after command line options have been parsed. + + After that, the hook is called for other conftest files as they are + imported. + + .. note:: + This hook is incompatible with ``hookwrapper=True``. + + :arg _pytest.config.Config config: pytest config object + """ + + +# ------------------------------------------------------------------------- +# Bootstrapping hooks called for plugins registered early enough: +# internal and 3rd party plugins. +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_cmdline_parse(pluginmanager, args): + """return initialized config object, parsing the specified args. + + Stops at first non-None result, see :ref:`firstresult` + + .. note:: + This hook will only be called for plugin classes passed to the ``plugins`` arg when using `pytest.main`_ to + perform an in-process test run. + + :param _pytest.config.PytestPluginManager pluginmanager: pytest plugin manager + :param list[str] args: list of arguments passed on the command line + """ + + +def pytest_cmdline_preparse(config, args): + """(**Deprecated**) modify command line arguments before option parsing. + + This hook is considered deprecated and will be removed in a future pytest version. Consider + using :func:`pytest_load_initial_conftests` instead. + + .. note:: + This hook will not be called for ``conftest.py`` files, only for setuptools plugins. + + :param _pytest.config.Config config: pytest config object + :param list[str] args: list of arguments passed on the command line + """ + + +@hookspec(firstresult=True) +def pytest_cmdline_main(config): + """ called for performing the main command line action. The default + implementation will invoke the configure hooks and runtest_mainloop. + + .. 
note:: + This hook will not be called for ``conftest.py`` files, only for setuptools plugins. + + Stops at first non-None result, see :ref:`firstresult` + + :param _pytest.config.Config config: pytest config object + """ + + +def pytest_load_initial_conftests(early_config, parser, args): + """ implements the loading of initial conftest files ahead + of command line option parsing. + + .. note:: + This hook will not be called for ``conftest.py`` files, only for setuptools plugins. + + :param _pytest.config.Config early_config: pytest config object + :param list[str] args: list of arguments passed on the command line + :param _pytest.config.Parser parser: to add command line options + """ + + +# ------------------------------------------------------------------------- +# collection hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_collection(session): + """Perform the collection protocol for the given session. + + Stops at first non-None result, see :ref:`firstresult`. + + :param _pytest.main.Session session: the pytest session object + """ + + +def pytest_collection_modifyitems(session, config, items): + """ called after collection has been performed, may filter or re-order + the items in-place. + + :param _pytest.main.Session session: the pytest session object + :param _pytest.config.Config config: pytest config object + :param List[_pytest.nodes.Item] items: list of item objects + """ + + +def pytest_collection_finish(session): + """ called after collection has been performed and modified. + + :param _pytest.main.Session session: the pytest session object + """ + + +@hookspec(firstresult=True) +def pytest_ignore_collect(path, config): + """ return True to prevent considering this path for collection. + This hook is consulted for all files and directories prior to calling + more specific hooks. + + Stops at first non-None result, see :ref:`firstresult` + + :param path: a :py:class:`py.path.local` - the path to analyze + :param _pytest.config.Config config: pytest config object + """ + + +@hookspec(firstresult=True) +def pytest_collect_directory(path, parent): + """ called before traversing a directory for collection files. + + Stops at first non-None result, see :ref:`firstresult` + + :param path: a :py:class:`py.path.local` - the path to analyze + """ + + +def pytest_collect_file(path, parent): + """ return collection Node or None for the given path. Any new node + needs to have the specified ``parent`` as a parent. + + :param path: a :py:class:`py.path.local` - the path to collect + """ + + +# logging hooks for collection + + +def pytest_collectstart(collector): + """ collector starts collecting. """ + + +def pytest_itemcollected(item): + """ we just collected a test item. """ + + +def pytest_collectreport(report): + """ collector finished collecting. """ + + +def pytest_deselected(items): + """ called for test items deselected, e.g. by keyword. """ + + +@hookspec(firstresult=True) +def pytest_make_collect_report(collector): + """ perform ``collector.collect()`` and return a CollectReport. + + Stops at first non-None result, see :ref:`firstresult` """ + + +# ------------------------------------------------------------------------- +# Python test function related hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_pycollect_makemodule(path, parent): + """ return a Module collector or None for the given path. 
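These collection hooks are usually implemented in a ``conftest.py``; a minimal sketch using a hypothetical ``slow`` marker:

    # conftest.py
    def pytest_collection_modifyitems(session, config, items):
        # reorder in place: push tests carrying a 'slow' marker to the end
        items.sort(key=lambda item: "slow" in item.keywords)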
+ This hook will be called for each matching test module path. + The pytest_collect_file hook needs to be used if you want to + create test modules for files that do not match as a test module. + + Stops at first non-None result, see :ref:`firstresult` + + :param path: a :py:class:`py.path.local` - the path of module to collect + """ + + +@hookspec(firstresult=True) +def pytest_pycollect_makeitem(collector, name, obj): + """ return custom item/collector for a python object in a module, or None. + + Stops at first non-None result, see :ref:`firstresult` """ + + +@hookspec(firstresult=True) +def pytest_pyfunc_call(pyfuncitem): + """ call underlying test function. + + Stops at first non-None result, see :ref:`firstresult` """ + + +def pytest_generate_tests(metafunc): + """ generate (multiple) parametrized calls to a test function.""" + + +@hookspec(firstresult=True) +def pytest_make_parametrize_id(config, val, argname): + """Return a user-friendly string representation of the given ``val`` that will be used + by @pytest.mark.parametrize calls. Return None if the hook doesn't know about ``val``. + The parameter name is available as ``argname``, if required. + + Stops at first non-None result, see :ref:`firstresult` + + :param _pytest.config.Config config: pytest config object + :param val: the parametrized value + :param str argname: the automatic parameter name produced by pytest + """ + + +# ------------------------------------------------------------------------- +# generic runtest related hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_runtestloop(session): + """ called for performing the main runtest loop + (after collection finished). + + Stops at first non-None result, see :ref:`firstresult` + + :param _pytest.main.Session session: the pytest session object + """ + + +def pytest_itemstart(item, node): + """(**Deprecated**) use pytest_runtest_logstart. """ + + +@hookspec(firstresult=True) +def pytest_runtest_protocol(item, nextitem): + """ implements the runtest_setup/call/teardown protocol for + the given test item, including capturing exceptions and calling + reporting hooks. + + :arg item: test item for which the runtest protocol is performed. + + :arg nextitem: the scheduled-to-be-next test item (or None if this + is the end my friend). This argument is passed on to + :py:func:`pytest_runtest_teardown`. + + :return boolean: True if no further hook implementations should be invoked. + + + Stops at first non-None result, see :ref:`firstresult` """ + + +def pytest_runtest_logstart(nodeid, location): + """ signal the start of running a single test item. + + This hook will be called **before** :func:`pytest_runtest_setup`, :func:`pytest_runtest_call` and + :func:`pytest_runtest_teardown` hooks. + + :param str nodeid: full id of the item + :param location: a triple of ``(filename, linenum, testname)`` + """ + + +def pytest_runtest_logfinish(nodeid, location): + """ signal the complete finish of running a single test item. + + This hook will be called **after** :func:`pytest_runtest_setup`, :func:`pytest_runtest_call` and + :func:`pytest_runtest_teardown` hooks. + + :param str nodeid: full id of the item + :param location: a triple of ``(filename, linenum, testname)`` + """ + + +def pytest_runtest_setup(item): + """ called before ``pytest_runtest_call(item)``. """ + + +def pytest_runtest_call(item): + """ called to execute the test ``item``. 
""" + + +def pytest_runtest_teardown(item, nextitem): + """ called after ``pytest_runtest_call``. + + :arg nextitem: the scheduled-to-be-next test item (None if no further + test item is scheduled). This argument can be used to + perform exact teardowns, i.e. calling just enough finalizers + so that nextitem only needs to call setup-functions. + """ + + +@hookspec(firstresult=True) +def pytest_runtest_makereport(item, call): + """ return a :py:class:`_pytest.runner.TestReport` object + for the given :py:class:`pytest.Item <_pytest.main.Item>` and + :py:class:`_pytest.runner.CallInfo`. + + Stops at first non-None result, see :ref:`firstresult` """ + + +def pytest_runtest_logreport(report): + """ process a test setup/call/teardown report relating to + the respective phase of executing a test. """ + + +@hookspec(firstresult=True) +def pytest_report_to_serializable(config, report): + """ + .. warning:: + This hook is experimental and subject to change between pytest releases, even + bug fixes. + + The intent is for this to be used by plugins maintained by the core-devs, such + as ``pytest-xdist``, ``pytest-subtests``, and as a replacement for the internal + 'resultlog' plugin. + + In the future it might become part of the public hook API. + + Serializes the given report object into a data structure suitable for sending + over the wire, e.g. converted to JSON. + """ + + +@hookspec(firstresult=True) +def pytest_report_from_serializable(config, data): + """ + .. warning:: + This hook is experimental and subject to change between pytest releases, even + bug fixes. + + The intent is for this to be used by plugins maintained by the core-devs, such + as ``pytest-xdist``, ``pytest-subtests``, and as a replacement for the internal + 'resultlog' plugin. + + In the future it might become part of the public hook API. + + Restores a report object previously serialized with pytest_report_to_serializable(). + """ + + +# ------------------------------------------------------------------------- +# Fixture related hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_fixture_setup(fixturedef, request): + """ performs fixture setup execution. + + :return: The return value of the call to the fixture function + + Stops at first non-None result, see :ref:`firstresult` + + .. note:: + If the fixture function returns None, other implementations of + this hook function will continue to be called, according to the + behavior of the :ref:`firstresult` option. + """ + + +def pytest_fixture_post_finalizer(fixturedef, request): + """ called after fixture teardown, but before the cache is cleared so + the fixture result cache ``fixturedef.cached_result`` can + still be accessed.""" + + +# ------------------------------------------------------------------------- +# test session related hooks +# ------------------------------------------------------------------------- + + +def pytest_sessionstart(session): + """ called after the ``Session`` object has been created and before performing collection + and entering the run test loop. + + :param _pytest.main.Session session: the pytest session object + """ + + +def pytest_sessionfinish(session, exitstatus): + """ called after whole test run finished, right before returning the exit status to the system. 
+
+ :param _pytest.main.Session session: the pytest session object
+ :param int exitstatus: the status which pytest will return to the system
+ """
+
+
+def pytest_unconfigure(config):
+ """ called before test process is exited.
+
+ :param _pytest.config.Config config: pytest config object
+ """
+
+
+# -------------------------------------------------------------------------
+# hooks for customizing the assert methods
+# -------------------------------------------------------------------------
+
+
+def pytest_assertrepr_compare(config, op, left, right):
+ """return explanation for comparisons in failing assert expressions.
+
+ Return None for no custom explanation, otherwise return a list
+ of strings. The strings will be joined by newlines but any newlines
+ *in* a string will be escaped. Note that all but the first line will
+ be indented slightly, the intention is for the first line to be a summary.
+
+ :param _pytest.config.Config config: pytest config object
+ """
+
+
+# -------------------------------------------------------------------------
+# hooks for influencing reporting (invoked from _pytest_terminal)
+# -------------------------------------------------------------------------
+
+
+def pytest_report_header(config, startdir):
+ """ return a string or list of strings to be displayed as header info for terminal reporting.
+
+ :param _pytest.config.Config config: pytest config object
+ :param startdir: py.path object with the starting dir
+
+ .. note::
+
+ This function should be implemented only in plugins or ``conftest.py``
+ files situated at the tests root directory due to how pytest
+ :ref:`discovers plugins during startup <pluginorder>`.
+ """
+
+
+def pytest_report_collectionfinish(config, startdir, items):
+ """
+ .. versionadded:: 3.2
+
+ return a string or list of strings to be displayed after collection has finished successfully.
+
+ These strings will be displayed after the standard "collected X items" message.
+
+ :param _pytest.config.Config config: pytest config object
+ :param startdir: py.path object with the starting dir
+ :param items: list of pytest items that are going to be executed; this list should not be modified.
+ """
+
+
+@hookspec(firstresult=True)
+def pytest_report_teststatus(report, config):
+ """ return result-category, shortletter and verbose word for reporting.
+
+ :param _pytest.config.Config config: pytest config object
+
+ Stops at first non-None result, see :ref:`firstresult` """
+
+
+def pytest_terminal_summary(terminalreporter, exitstatus, config):
+ """Add a section to terminal summary reporting.
+
+ :param _pytest.terminal.TerminalReporter terminalreporter: the internal terminal reporter object
+ :param int exitstatus: the exit status that will be reported back to the OS
+ :param _pytest.config.Config config: pytest config object
+
+ .. versionadded:: 4.2
+ The ``config`` parameter.
+ """
+
+
+@hookspec(historic=True, warn_on_impl=PYTEST_LOGWARNING)
+def pytest_logwarning(message, code, nodeid, fslocation):
+ """
+ .. deprecated:: 3.8
+
+ This hook will stop working in a future release.
+
+ pytest no longer triggers this hook, but the
+ terminal writer still implements it to display warnings issued by
+ :meth:`_pytest.config.Config.warn` and :meth:`_pytest.nodes.Node.warn`. Calling those functions will be
+ an error in future releases.
+
+ process a warning specified by a message, a code string,
+ a nodeid and fslocation (both of which may be None
+ if the warning is not tied to a particular node/location).
+
+ ..
note:: + This hook is incompatible with ``hookwrapper=True``. + """ + + +@hookspec(historic=True) +def pytest_warning_captured(warning_message, when, item): + """ + Process a warning captured by the internal pytest warnings plugin. + + :param warnings.WarningMessage warning_message: + The captured warning. This is the same object produced by :py:func:`warnings.catch_warnings`, and contains + the same attributes as the parameters of :py:func:`warnings.showwarning`. + + :param str when: + Indicates when the warning was captured. Possible values: + + * ``"config"``: during pytest configuration/initialization stage. + * ``"collect"``: during test collection. + * ``"runtest"``: during test execution. + + :param pytest.Item|None item: + **DEPRECATED**: This parameter is incompatible with ``pytest-xdist``, and will always receive ``None`` + in a future release. + + The item being executed if ``when`` is ``"runtest"``, otherwise ``None``. + """ + + +# ------------------------------------------------------------------------- +# doctest hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_doctest_prepare_content(content): + """ return processed content for a given doctest + + Stops at first non-None result, see :ref:`firstresult` """ + + +# ------------------------------------------------------------------------- +# error handling and internal debugging hooks +# ------------------------------------------------------------------------- + + +def pytest_internalerror(excrepr, excinfo): + """ called for internal errors. """ + + +def pytest_keyboard_interrupt(excinfo): + """ called for keyboard interrupt. """ + + +def pytest_exception_interact(node, call, report): + """called when an exception was raised which can potentially be + interactively handled. + + This hook is only called if an exception was raised + that is not an internal exception like ``skip.Exception``. + """ + + +def pytest_enter_pdb(config, pdb): + """ called upon pdb.set_trace(), can be used by plugins to take special + action just before the python debugger enters in interactive mode. + + :param _pytest.config.Config config: pytest config object + :param pdb.Pdb pdb: Pdb instance + """ + + +def pytest_leave_pdb(config, pdb): + """ called when leaving pdb (e.g. with continue after pdb.set_trace()). + + Can be used by plugins to take special action just after the python + debugger leaves interactive mode. + + :param _pytest.config.Config config: pytest config object + :param pdb.Pdb pdb: Pdb instance + """ diff --git a/venv/lib/python2.7/site-packages/_pytest/junitxml.py b/venv/lib/python2.7/site-packages/_pytest/junitxml.py new file mode 100644 index 0000000..853dcb7 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/junitxml.py @@ -0,0 +1,707 @@ +# -*- coding: utf-8 -*- +""" + report test results in JUnit-XML format, + for use with Jenkins and build integration servers. + + +Based on initial code from Ross Lawley. 
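A hedged conftest.py sketch for the pytest_warning_captured spec above; it merely prints each captured warning together with the phase it was raised in:

    def pytest_warning_captured(warning_message, when, item):
        # warning_message is a warnings.WarningMessage instance
        print("[%s] %r" % (when, warning_message.message))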
+ +Output conforms to https://github.com/jenkinsci/xunit-plugin/blob/master/ +src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import functools +import os +import platform +import re +import sys +import time +from datetime import datetime + +import py +import six + +import pytest +from _pytest import nodes +from _pytest.config import filename_arg + +# Python 2.X and 3.X compatibility +if sys.version_info[0] < 3: + from codecs import open + + +class Junit(py.xml.Namespace): + pass + + +# We need to get the subset of the invalid unicode ranges according to +# XML 1.0 which are valid in this python build. Hence we calculate +# this dynamically instead of hardcoding it. The spec range of valid +# chars is: Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] +# | [#x10000-#x10FFFF] +_legal_chars = (0x09, 0x0A, 0x0D) +_legal_ranges = ((0x20, 0x7E), (0x80, 0xD7FF), (0xE000, 0xFFFD), (0x10000, 0x10FFFF)) +_legal_xml_re = [ + u"%s-%s" % (six.unichr(low), six.unichr(high)) + for (low, high) in _legal_ranges + if low < sys.maxunicode +] +_legal_xml_re = [six.unichr(x) for x in _legal_chars] + _legal_xml_re +illegal_xml_re = re.compile(u"[^%s]" % u"".join(_legal_xml_re)) +del _legal_chars +del _legal_ranges +del _legal_xml_re + +_py_ext_re = re.compile(r"\.py$") + + +def bin_xml_escape(arg): + def repl(matchobj): + i = ord(matchobj.group()) + if i <= 0xFF: + return u"#x%02X" % i + else: + return u"#x%04X" % i + + return py.xml.raw(illegal_xml_re.sub(repl, py.xml.escape(arg))) + + +def merge_family(left, right): + result = {} + for kl, vl in left.items(): + for kr, vr in right.items(): + if not isinstance(vl, list): + raise TypeError(type(vl)) + result[kl] = vl + vr + left.update(result) + + +families = {} +families["_base"] = {"testcase": ["classname", "name"]} +families["_base_legacy"] = {"testcase": ["file", "line", "url"]} + +# xUnit 1.x inherits legacy attributes +families["xunit1"] = families["_base"].copy() +merge_family(families["xunit1"], families["_base_legacy"]) + +# xUnit 2.x uses strict base attributes +families["xunit2"] = families["_base"] + + +class _NodeReporter(object): + def __init__(self, nodeid, xml): + self.id = nodeid + self.xml = xml + self.add_stats = self.xml.add_stats + self.family = self.xml.family + self.duration = 0 + self.properties = [] + self.nodes = [] + self.testcase = None + self.attrs = {} + + def append(self, node): + self.xml.add_stats(type(node).__name__) + self.nodes.append(node) + + def add_property(self, name, value): + self.properties.append((str(name), bin_xml_escape(value))) + + def add_attribute(self, name, value): + self.attrs[str(name)] = bin_xml_escape(value) + + def make_properties_node(self): + """Return a Junit node containing custom properties, if any. 
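A quick illustration of bin_xml_escape defined above, run outside this module and assuming the vendored _pytest.junitxml is importable; .uniobj is the unicode string wrapped by py.xml.raw:

    from _pytest.junitxml import bin_xml_escape

    # BEL (0x07) is illegal in XML 1.0 and becomes #x07; tab (0x09) is legal and kept
    print(bin_xml_escape(u"bell\x07 tab\t").uniobj)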
+ """ + if self.properties: + return Junit.properties( + [ + Junit.property(name=name, value=value) + for name, value in self.properties + ] + ) + return "" + + def record_testreport(self, testreport): + assert not self.testcase + names = mangle_test_address(testreport.nodeid) + existing_attrs = self.attrs + classnames = names[:-1] + if self.xml.prefix: + classnames.insert(0, self.xml.prefix) + attrs = { + "classname": ".".join(classnames), + "name": bin_xml_escape(names[-1]), + "file": testreport.location[0], + } + if testreport.location[1] is not None: + attrs["line"] = testreport.location[1] + if hasattr(testreport, "url"): + attrs["url"] = testreport.url + self.attrs = attrs + self.attrs.update(existing_attrs) # restore any user-defined attributes + + # Preserve legacy testcase behavior + if self.family == "xunit1": + return + + # Filter out attributes not permitted by this test family. + # Including custom attributes because they are not valid here. + temp_attrs = {} + for key in self.attrs.keys(): + if key in families[self.family]["testcase"]: + temp_attrs[key] = self.attrs[key] + self.attrs = temp_attrs + + def to_xml(self): + testcase = Junit.testcase(time="%.3f" % self.duration, **self.attrs) + testcase.append(self.make_properties_node()) + for node in self.nodes: + testcase.append(node) + return testcase + + def _add_simple(self, kind, message, data=None): + data = bin_xml_escape(data) + node = kind(data, message=message) + self.append(node) + + def write_captured_output(self, report): + if not self.xml.log_passing_tests and report.passed: + return + + content_out = report.capstdout + content_log = report.caplog + content_err = report.capstderr + + if content_log or content_out: + if content_log and self.xml.logging == "system-out": + if content_out: + # syncing stdout and the log-output is not done yet. It's + # probably not worth the effort. Therefore, first the captured + # stdout is shown and then the captured logs. 
+ content = "\n".join( + [ + " Captured Stdout ".center(80, "-"), + content_out, + "", + " Captured Log ".center(80, "-"), + content_log, + ] + ) + else: + content = content_log + else: + content = content_out + + if content: + tag = getattr(Junit, "system-out") + self.append(tag(bin_xml_escape(content))) + + if content_log or content_err: + if content_log and self.xml.logging == "system-err": + if content_err: + content = "\n".join( + [ + " Captured Stderr ".center(80, "-"), + content_err, + "", + " Captured Log ".center(80, "-"), + content_log, + ] + ) + else: + content = content_log + else: + content = content_err + + if content: + tag = getattr(Junit, "system-err") + self.append(tag(bin_xml_escape(content))) + + def append_pass(self, report): + self.add_stats("passed") + + def append_failure(self, report): + # msg = str(report.longrepr.reprtraceback.extraline) + if hasattr(report, "wasxfail"): + self._add_simple(Junit.skipped, "xfail-marked test passes unexpectedly") + else: + if hasattr(report.longrepr, "reprcrash"): + message = report.longrepr.reprcrash.message + elif isinstance(report.longrepr, six.string_types): + message = report.longrepr + else: + message = str(report.longrepr) + message = bin_xml_escape(message) + fail = Junit.failure(message=message) + fail.append(bin_xml_escape(report.longrepr)) + self.append(fail) + + def append_collect_error(self, report): + # msg = str(report.longrepr.reprtraceback.extraline) + self.append( + Junit.error(bin_xml_escape(report.longrepr), message="collection failure") + ) + + def append_collect_skipped(self, report): + self._add_simple(Junit.skipped, "collection skipped", report.longrepr) + + def append_error(self, report): + if report.when == "teardown": + msg = "test teardown failure" + else: + msg = "test setup failure" + self._add_simple(Junit.error, msg, report.longrepr) + + def append_skipped(self, report): + if hasattr(report, "wasxfail"): + xfailreason = report.wasxfail + if xfailreason.startswith("reason: "): + xfailreason = xfailreason[8:] + self.append( + Junit.skipped( + "", type="pytest.xfail", message=bin_xml_escape(xfailreason) + ) + ) + else: + filename, lineno, skipreason = report.longrepr + if skipreason.startswith("Skipped: "): + skipreason = skipreason[9:] + details = "%s:%s: %s" % (filename, lineno, skipreason) + + self.append( + Junit.skipped( + bin_xml_escape(details), + type="pytest.skip", + message=bin_xml_escape(skipreason), + ) + ) + self.write_captured_output(report) + + def finalize(self): + data = self.to_xml().unicode(indent=0) + self.__dict__.clear() + self.to_xml = lambda: py.xml.raw(data) + + +def _warn_incompatibility_with_xunit2(request, fixture_name): + """Emits a PytestWarning about the given fixture being incompatible with newer xunit revisions""" + from _pytest.warning_types import PytestWarning + + xml = getattr(request.config, "_xml", None) + if xml is not None and xml.family not in ("xunit1", "legacy"): + request.node.warn( + PytestWarning( + "{fixture_name} is incompatible with junit_family '{family}' (use 'legacy' or 'xunit1')".format( + fixture_name=fixture_name, family=xml.family + ) + ) + ) + + +@pytest.fixture +def record_property(request): + """Add an extra properties the calling test. + User properties become part of the test report and are available to the + configured reporters, like JUnit XML. + The fixture is callable with ``(name, value)``, with value being automatically + xml-encoded. 
+ + Example:: + + def test_function(record_property): + record_property("example_key", 1) + """ + _warn_incompatibility_with_xunit2(request, "record_property") + + def append_property(name, value): + request.node.user_properties.append((name, value)) + + return append_property + + +@pytest.fixture +def record_xml_attribute(request): + """Add extra xml attributes to the tag for the calling test. + The fixture is callable with ``(name, value)``, with value being + automatically xml-encoded + """ + from _pytest.warning_types import PytestExperimentalApiWarning + + request.node.warn( + PytestExperimentalApiWarning("record_xml_attribute is an experimental feature") + ) + + _warn_incompatibility_with_xunit2(request, "record_xml_attribute") + + # Declare noop + def add_attr_noop(name, value): + pass + + attr_func = add_attr_noop + + xml = getattr(request.config, "_xml", None) + if xml is not None: + node_reporter = xml.node_reporter(request.node.nodeid) + attr_func = node_reporter.add_attribute + + return attr_func + + +def _check_record_param_type(param, v): + """Used by record_testsuite_property to check that the given parameter name is of the proper + type""" + __tracebackhide__ = True + if not isinstance(v, six.string_types): + msg = "{param} parameter needs to be a string, but {g} given" + raise TypeError(msg.format(param=param, g=type(v).__name__)) + + +@pytest.fixture(scope="session") +def record_testsuite_property(request): + """ + Records a new ```` tag as child of the root ````. This is suitable to + writing global information regarding the entire test suite, and is compatible with ``xunit2`` JUnit family. + + This is a ``session``-scoped fixture which is called with ``(name, value)``. Example: + + .. code-block:: python + + def test_foo(record_testsuite_property): + record_testsuite_property("ARCH", "PPC") + record_testsuite_property("STORAGE_TYPE", "CEPH") + + ``name`` must be a string, ``value`` will be converted to a string and properly xml-escaped. 
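A hedged usage sketch for the record_xml_attribute fixture above (experimental, per its warning); the attribute name and value are arbitrary examples:

    def test_function(record_xml_attribute):
        record_xml_attribute("requirement", "REQ-1234")
        assert True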
+ """ + + __tracebackhide__ = True + + def record_func(name, value): + """noop function in case --junitxml was not passed in the command-line""" + __tracebackhide__ = True + _check_record_param_type("name", name) + + xml = getattr(request.config, "_xml", None) + if xml is not None: + record_func = xml.add_global_property # noqa + return record_func + + +def pytest_addoption(parser): + group = parser.getgroup("terminal reporting") + group.addoption( + "--junitxml", + "--junit-xml", + action="store", + dest="xmlpath", + metavar="path", + type=functools.partial(filename_arg, optname="--junitxml"), + default=None, + help="create junit-xml style report file at given path.", + ) + group.addoption( + "--junitprefix", + "--junit-prefix", + action="store", + metavar="str", + default=None, + help="prepend prefix to classnames in junit-xml output", + ) + parser.addini( + "junit_suite_name", "Test suite name for JUnit report", default="pytest" + ) + parser.addini( + "junit_logging", + "Write captured log messages to JUnit report: " + "one of no|system-out|system-err", + default="no", + ) # choices=['no', 'stdout', 'stderr']) + parser.addini( + "junit_log_passing_tests", + "Capture log information for passing tests to JUnit report: ", + type="bool", + default=True, + ) + parser.addini( + "junit_duration_report", + "Duration time to report: one of total|call", + default="total", + ) # choices=['total', 'call']) + parser.addini( + "junit_family", + "Emit XML for schema: one of legacy|xunit1|xunit2", + default="xunit1", + ) + + +def pytest_configure(config): + xmlpath = config.option.xmlpath + # prevent opening xmllog on slave nodes (xdist) + if xmlpath and not hasattr(config, "slaveinput"): + config._xml = LogXML( + xmlpath, + config.option.junitprefix, + config.getini("junit_suite_name"), + config.getini("junit_logging"), + config.getini("junit_duration_report"), + config.getini("junit_family"), + config.getini("junit_log_passing_tests"), + ) + config.pluginmanager.register(config._xml) + + +def pytest_unconfigure(config): + xml = getattr(config, "_xml", None) + if xml: + del config._xml + config.pluginmanager.unregister(xml) + + +def mangle_test_address(address): + path, possible_open_bracket, params = address.partition("[") + names = path.split("::") + try: + names.remove("()") + except ValueError: + pass + # convert file path to dotted path + names[0] = names[0].replace(nodes.SEP, ".") + names[0] = _py_ext_re.sub("", names[0]) + # put any params back + names[-1] += possible_open_bracket + params + return names + + +class LogXML(object): + def __init__( + self, + logfile, + prefix, + suite_name="pytest", + logging="no", + report_duration="total", + family="xunit1", + log_passing_tests=True, + ): + logfile = os.path.expanduser(os.path.expandvars(logfile)) + self.logfile = os.path.normpath(os.path.abspath(logfile)) + self.prefix = prefix + self.suite_name = suite_name + self.logging = logging + self.log_passing_tests = log_passing_tests + self.report_duration = report_duration + self.family = family + self.stats = dict.fromkeys(["error", "passed", "failure", "skipped"], 0) + self.node_reporters = {} # nodeid -> _NodeReporter + self.node_reporters_ordered = [] + self.global_properties = [] + + # List of reports that failed on call but teardown is pending. 
+ self.open_reports = [] + self.cnt_double_fail_tests = 0 + + # Replaces convenience family with real family + if self.family == "legacy": + self.family = "xunit1" + + def finalize(self, report): + nodeid = getattr(report, "nodeid", report) + # local hack to handle xdist report order + slavenode = getattr(report, "node", None) + reporter = self.node_reporters.pop((nodeid, slavenode)) + if reporter is not None: + reporter.finalize() + + def node_reporter(self, report): + nodeid = getattr(report, "nodeid", report) + # local hack to handle xdist report order + slavenode = getattr(report, "node", None) + + key = nodeid, slavenode + + if key in self.node_reporters: + # TODO: breasks for --dist=each + return self.node_reporters[key] + + reporter = _NodeReporter(nodeid, self) + + self.node_reporters[key] = reporter + self.node_reporters_ordered.append(reporter) + + return reporter + + def add_stats(self, key): + if key in self.stats: + self.stats[key] += 1 + + def _opentestcase(self, report): + reporter = self.node_reporter(report) + reporter.record_testreport(report) + return reporter + + def pytest_runtest_logreport(self, report): + """handle a setup/call/teardown report, generating the appropriate + xml tags as necessary. + + note: due to plugins like xdist, this hook may be called in interlaced + order with reports from other nodes. for example: + + usual call order: + -> setup node1 + -> call node1 + -> teardown node1 + -> setup node2 + -> call node2 + -> teardown node2 + + possible call order in xdist: + -> setup node1 + -> call node1 + -> setup node2 + -> call node2 + -> teardown node2 + -> teardown node1 + """ + close_report = None + if report.passed: + if report.when == "call": # ignore setup/teardown + reporter = self._opentestcase(report) + reporter.append_pass(report) + elif report.failed: + if report.when == "teardown": + # The following vars are needed when xdist plugin is used + report_wid = getattr(report, "worker_id", None) + report_ii = getattr(report, "item_index", None) + close_report = next( + ( + rep + for rep in self.open_reports + if ( + rep.nodeid == report.nodeid + and getattr(rep, "item_index", None) == report_ii + and getattr(rep, "worker_id", None) == report_wid + ) + ), + None, + ) + if close_report: + # We need to open new testcase in case we have failure in + # call and error in teardown in order to follow junit + # schema + self.finalize(close_report) + self.cnt_double_fail_tests += 1 + reporter = self._opentestcase(report) + if report.when == "call": + reporter.append_failure(report) + self.open_reports.append(report) + if not self.log_passing_tests: + reporter.write_captured_output(report) + else: + reporter.append_error(report) + elif report.skipped: + reporter = self._opentestcase(report) + reporter.append_skipped(report) + self.update_testcase_duration(report) + if report.when == "teardown": + reporter = self._opentestcase(report) + reporter.write_captured_output(report) + + for propname, propvalue in report.user_properties: + reporter.add_property(propname, propvalue) + + self.finalize(report) + report_wid = getattr(report, "worker_id", None) + report_ii = getattr(report, "item_index", None) + close_report = next( + ( + rep + for rep in self.open_reports + if ( + rep.nodeid == report.nodeid + and getattr(rep, "item_index", None) == report_ii + and getattr(rep, "worker_id", None) == report_wid + ) + ), + None, + ) + if close_report: + self.open_reports.remove(close_report) + + def update_testcase_duration(self, report): + """accumulates total duration for 
nodeid from given report and updates + the Junit.testcase with the new total if already created. + """ + if self.report_duration == "total" or report.when == self.report_duration: + reporter = self.node_reporter(report) + reporter.duration += getattr(report, "duration", 0.0) + + def pytest_collectreport(self, report): + if not report.passed: + reporter = self._opentestcase(report) + if report.failed: + reporter.append_collect_error(report) + else: + reporter.append_collect_skipped(report) + + def pytest_internalerror(self, excrepr): + reporter = self.node_reporter("internal") + reporter.attrs.update(classname="pytest", name="internal") + reporter._add_simple(Junit.error, "internal error", excrepr) + + def pytest_sessionstart(self): + self.suite_start_time = time.time() + + def pytest_sessionfinish(self): + dirname = os.path.dirname(os.path.abspath(self.logfile)) + if not os.path.isdir(dirname): + os.makedirs(dirname) + logfile = open(self.logfile, "w", encoding="utf-8") + suite_stop_time = time.time() + suite_time_delta = suite_stop_time - self.suite_start_time + + numtests = ( + self.stats["passed"] + + self.stats["failure"] + + self.stats["skipped"] + + self.stats["error"] + - self.cnt_double_fail_tests + ) + logfile.write('') + + suite_node = Junit.testsuite( + self._get_global_properties_node(), + [x.to_xml() for x in self.node_reporters_ordered], + name=self.suite_name, + errors=self.stats["error"], + failures=self.stats["failure"], + skipped=self.stats["skipped"], + tests=numtests, + time="%.3f" % suite_time_delta, + timestamp=datetime.fromtimestamp(self.suite_start_time).isoformat(), + hostname=platform.node(), + ) + logfile.write(Junit.testsuites([suite_node]).unicode(indent=0)) + logfile.close() + + def pytest_terminal_summary(self, terminalreporter): + terminalreporter.write_sep("-", "generated xml file: %s" % (self.logfile)) + + def add_global_property(self, name, value): + __tracebackhide__ = True + _check_record_param_type("name", name) + self.global_properties.append((name, bin_xml_escape(value))) + + def _get_global_properties_node(self): + """Return a Junit node containing custom properties, if any. + """ + if self.global_properties: + return Junit.properties( + [ + Junit.property(name=name, value=value) + for name, value in self.global_properties + ] + ) + return "" diff --git a/venv/lib/python2.7/site-packages/_pytest/logging.py b/venv/lib/python2.7/site-packages/_pytest/logging.py new file mode 100644 index 0000000..2400737 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/logging.py @@ -0,0 +1,708 @@ +# -*- coding: utf-8 -*- +""" Access and control log capturing. """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import logging +import re +from contextlib import contextmanager + +import py +import six + +import pytest +from _pytest.compat import dummy_context_manager +from _pytest.config import create_terminal_writer +from _pytest.pathlib import Path + +DEFAULT_LOG_FORMAT = "%(levelname)-8s %(name)s:%(filename)s:%(lineno)d %(message)s" +DEFAULT_LOG_DATE_FORMAT = "%H:%M:%S" +_ANSI_ESCAPE_SEQ = re.compile(r"\x1b\[[\d;]+m") + + +def _remove_ansi_escape_sequences(text): + return _ANSI_ESCAPE_SEQ.sub("", text) + + +class ColoredLevelFormatter(logging.Formatter): + """ + Colorize the %(levelname)..s part of the log format passed to __init__. 
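A hedged usage sketch that produces the XML file written by pytest_sessionfinish above; the report path, prefix, and test directory are hypothetical:

    import pytest

    pytest.main(["--junitxml=report.xml", "--junit-prefix=myproj", "tests/"])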
+ """ + + LOGLEVEL_COLOROPTS = { + logging.CRITICAL: {"red"}, + logging.ERROR: {"red", "bold"}, + logging.WARNING: {"yellow"}, + logging.WARN: {"yellow"}, + logging.INFO: {"green"}, + logging.DEBUG: {"purple"}, + logging.NOTSET: set(), + } + LEVELNAME_FMT_REGEX = re.compile(r"%\(levelname\)([+-]?\d*s)") + + def __init__(self, terminalwriter, *args, **kwargs): + super(ColoredLevelFormatter, self).__init__(*args, **kwargs) + if six.PY2: + self._original_fmt = self._fmt + else: + self._original_fmt = self._style._fmt + self._level_to_fmt_mapping = {} + + levelname_fmt_match = self.LEVELNAME_FMT_REGEX.search(self._fmt) + if not levelname_fmt_match: + return + levelname_fmt = levelname_fmt_match.group() + + for level, color_opts in self.LOGLEVEL_COLOROPTS.items(): + formatted_levelname = levelname_fmt % { + "levelname": logging.getLevelName(level) + } + + # add ANSI escape sequences around the formatted levelname + color_kwargs = {name: True for name in color_opts} + colorized_formatted_levelname = terminalwriter.markup( + formatted_levelname, **color_kwargs + ) + self._level_to_fmt_mapping[level] = self.LEVELNAME_FMT_REGEX.sub( + colorized_formatted_levelname, self._fmt + ) + + def format(self, record): + fmt = self._level_to_fmt_mapping.get(record.levelno, self._original_fmt) + if six.PY2: + self._fmt = fmt + else: + self._style._fmt = fmt + return super(ColoredLevelFormatter, self).format(record) + + +if not six.PY2: + # Formatter classes don't support format styles in PY2 + + class PercentStyleMultiline(logging.PercentStyle): + """A logging style with special support for multiline messages. + + If the message of a record consists of multiple lines, this style + formats the message as if each line were logged separately. + """ + + @staticmethod + def _update_message(record_dict, message): + tmp = record_dict.copy() + tmp["message"] = message + return tmp + + def format(self, record): + if "\n" in record.message: + lines = record.message.splitlines() + formatted = self._fmt % self._update_message(record.__dict__, lines[0]) + # TODO optimize this by introducing an option that tells the + # logging framework that the indentation doesn't + # change. This allows to compute the indentation only once. 
+ indentation = _remove_ansi_escape_sequences(formatted).find(lines[0]) + lines[0] = formatted + return ("\n" + " " * indentation).join(lines) + else: + return self._fmt % record.__dict__ + + +def get_option_ini(config, *names): + for name in names: + ret = config.getoption(name) # 'default' arg won't work as expected + if ret is None: + ret = config.getini(name) + if ret: + return ret + + +def pytest_addoption(parser): + """Add options to control log capturing.""" + group = parser.getgroup("logging") + + def add_option_ini(option, dest, default=None, type=None, **kwargs): + parser.addini( + dest, default=default, type=type, help="default value for " + option + ) + group.addoption(option, dest=dest, **kwargs) + + add_option_ini( + "--no-print-logs", + dest="log_print", + action="store_const", + const=False, + default=True, + type="bool", + help="disable printing caught logs on failed tests.", + ) + add_option_ini( + "--log-level", + dest="log_level", + default=None, + help="logging level used by the logging module", + ) + add_option_ini( + "--log-format", + dest="log_format", + default=DEFAULT_LOG_FORMAT, + help="log format as used by the logging module.", + ) + add_option_ini( + "--log-date-format", + dest="log_date_format", + default=DEFAULT_LOG_DATE_FORMAT, + help="log date format as used by the logging module.", + ) + parser.addini( + "log_cli", + default=False, + type="bool", + help='enable log display during test run (also known as "live logging").', + ) + add_option_ini( + "--log-cli-level", dest="log_cli_level", default=None, help="cli logging level." + ) + add_option_ini( + "--log-cli-format", + dest="log_cli_format", + default=None, + help="log format as used by the logging module.", + ) + add_option_ini( + "--log-cli-date-format", + dest="log_cli_date_format", + default=None, + help="log date format as used by the logging module.", + ) + add_option_ini( + "--log-file", + dest="log_file", + default=None, + help="path to a file when logging will be written to.", + ) + add_option_ini( + "--log-file-level", + dest="log_file_level", + default=None, + help="log file logging level.", + ) + add_option_ini( + "--log-file-format", + dest="log_file_format", + default=DEFAULT_LOG_FORMAT, + help="log format as used by the logging module.", + ) + add_option_ini( + "--log-file-date-format", + dest="log_file_date_format", + default=DEFAULT_LOG_DATE_FORMAT, + help="log date format as used by the logging module.", + ) + + +@contextmanager +def catching_logs(handler, formatter=None, level=None): + """Context manager that prepares the whole logging machinery properly.""" + root_logger = logging.getLogger() + + if formatter is not None: + handler.setFormatter(formatter) + if level is not None: + handler.setLevel(level) + + # Adding the same handler twice would confuse logging system. + # Just don't do that. 
+ add_new_handler = handler not in root_logger.handlers + + if add_new_handler: + root_logger.addHandler(handler) + if level is not None: + orig_level = root_logger.level + root_logger.setLevel(min(orig_level, level)) + try: + yield handler + finally: + if level is not None: + root_logger.setLevel(orig_level) + if add_new_handler: + root_logger.removeHandler(handler) + + +class LogCaptureHandler(logging.StreamHandler): + """A logging handler that stores log records and the log text.""" + + def __init__(self): + """Creates a new log handler.""" + logging.StreamHandler.__init__(self, py.io.TextIO()) + self.records = [] + + def emit(self, record): + """Keep the log records in a list in addition to the log text.""" + self.records.append(record) + logging.StreamHandler.emit(self, record) + + def reset(self): + self.records = [] + self.stream = py.io.TextIO() + + +class LogCaptureFixture(object): + """Provides access and control of log capturing.""" + + def __init__(self, item): + """Creates a new funcarg.""" + self._item = item + # dict of log name -> log level + self._initial_log_levels = {} # Dict[str, int] + + def _finalize(self): + """Finalizes the fixture. + + This restores the log levels changed by :meth:`set_level`. + """ + # restore log levels + for logger_name, level in self._initial_log_levels.items(): + logger = logging.getLogger(logger_name) + logger.setLevel(level) + + @property + def handler(self): + """ + :rtype: LogCaptureHandler + """ + return self._item.catch_log_handler + + def get_records(self, when): + """ + Get the logging records for one of the possible test phases. + + :param str when: + Which test phase to obtain the records from. Valid values are: "setup", "call" and "teardown". + + :rtype: List[logging.LogRecord] + :return: the list of captured records at the given stage + + .. versionadded:: 3.4 + """ + handler = self._item.catch_log_handlers.get(when) + if handler: + return handler.records + else: + return [] + + @property + def text(self): + """Returns the formatted log text.""" + return _remove_ansi_escape_sequences(self.handler.stream.getvalue()) + + @property + def records(self): + """Returns the list of log records.""" + return self.handler.records + + @property + def record_tuples(self): + """Returns a list of a stripped down version of log records intended + for use in assertion comparison. + + The format of the tuple is: + + (logger_name, log_level, message) + """ + return [(r.name, r.levelno, r.getMessage()) for r in self.records] + + @property + def messages(self): + """Returns a list of format-interpolated log messages. + + Unlike 'records', which contains the format string and parameters for interpolation, log messages in this list + are all interpolated. + Unlike 'text', which contains the output from the handler, log messages in this list are unadorned with + levels, timestamps, etc, making exact comparisons more reliable. + + Note that traceback or stack info (from :func:`logging.exception` or the `exc_info` or `stack_info` arguments + to the logging functions) is not included, as this is added by the formatter in the handler. + + .. versionadded:: 3.7 + """ + return [r.getMessage() for r in self.records] + + def clear(self): + """Reset the list of log records and the captured log text.""" + self.handler.reset() + + def set_level(self, level, logger=None): + """Sets the level for capturing of logs. The level will be restored to its previous value at the end of + the test. + + :param int level: the logger to level. 
+ :param str logger: the logger to update the level. If not given, the root logger level is updated. + + .. versionchanged:: 3.4 + The levels of the loggers changed by this function will be restored to their initial values at the + end of the test. + """ + logger_name = logger + logger = logging.getLogger(logger_name) + # save the original log-level to restore it during teardown + self._initial_log_levels.setdefault(logger_name, logger.level) + logger.setLevel(level) + + @contextmanager + def at_level(self, level, logger=None): + """Context manager that sets the level for capturing of logs. After the end of the 'with' statement the + level is restored to its original value. + + :param int level: the logger to level. + :param str logger: the logger to update the level. If not given, the root logger level is updated. + """ + logger = logging.getLogger(logger) + orig_level = logger.level + logger.setLevel(level) + try: + yield + finally: + logger.setLevel(orig_level) + + +@pytest.fixture +def caplog(request): + """Access and control log capturing. + + Captured logs are available through the following properties/methods:: + + * caplog.text -> string containing formatted log output + * caplog.records -> list of logging.LogRecord instances + * caplog.record_tuples -> list of (logger_name, level, message) tuples + * caplog.clear() -> clear captured records and formatted log output string + """ + result = LogCaptureFixture(request.node) + yield result + result._finalize() + + +def get_actual_log_level(config, *setting_names): + """Return the actual logging level.""" + + for setting_name in setting_names: + log_level = config.getoption(setting_name) + if log_level is None: + log_level = config.getini(setting_name) + if log_level: + break + else: + return + + if isinstance(log_level, six.string_types): + log_level = log_level.upper() + try: + return int(getattr(logging, log_level, log_level)) + except ValueError: + # Python logging does not recognise this as a logging level + raise pytest.UsageError( + "'{}' is not recognized as a logging level name for " + "'{}'. Please consider passing the " + "logging level num instead.".format(log_level, setting_name) + ) + + +# run after terminalreporter/capturemanager are configured +@pytest.hookimpl(trylast=True) +def pytest_configure(config): + config.pluginmanager.register(LoggingPlugin(config), "logging-plugin") + + +class LoggingPlugin(object): + """Attaches to the logging module and captures log messages for each test. + """ + + def __init__(self, config): + """Creates a new plugin to capture log messages. + + The formatter can be safely shared across all handlers so + create a single one for the entire test session here. 
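A hedged usage sketch for the caplog fixture defined above; "myapp" is a hypothetical logger name:

    import logging

    def test_warning_is_captured(caplog):
        with caplog.at_level(logging.INFO, logger="myapp"):
            logging.getLogger("myapp").warning("disk almost full")
        assert caplog.record_tuples == [("myapp", logging.WARNING, "disk almost full")]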
+ """ + self._config = config + + self.print_logs = get_option_ini(config, "log_print") + self.formatter = self._create_formatter( + get_option_ini(config, "log_format"), + get_option_ini(config, "log_date_format"), + ) + self.log_level = get_actual_log_level(config, "log_level") + + self.log_file_level = get_actual_log_level(config, "log_file_level") + self.log_file_format = get_option_ini(config, "log_file_format", "log_format") + self.log_file_date_format = get_option_ini( + config, "log_file_date_format", "log_date_format" + ) + self.log_file_formatter = logging.Formatter( + self.log_file_format, datefmt=self.log_file_date_format + ) + + log_file = get_option_ini(config, "log_file") + if log_file: + self.log_file_handler = logging.FileHandler( + log_file, mode="w", encoding="UTF-8" + ) + self.log_file_handler.setFormatter(self.log_file_formatter) + else: + self.log_file_handler = None + + self.log_cli_handler = None + + self.live_logs_context = lambda: dummy_context_manager() + # Note that the lambda for the live_logs_context is needed because + # live_logs_context can otherwise not be entered multiple times due + # to limitations of contextlib.contextmanager. + + if self._log_cli_enabled(): + self._setup_cli_logging() + + def _create_formatter(self, log_format, log_date_format): + # color option doesn't exist if terminal plugin is disabled + color = getattr(self._config.option, "color", "no") + if color != "no" and ColoredLevelFormatter.LEVELNAME_FMT_REGEX.search( + log_format + ): + formatter = ColoredLevelFormatter( + create_terminal_writer(self._config), log_format, log_date_format + ) + else: + formatter = logging.Formatter(log_format, log_date_format) + + if not six.PY2: + formatter._style = PercentStyleMultiline(formatter._style._fmt) + return formatter + + def _setup_cli_logging(self): + config = self._config + terminal_reporter = config.pluginmanager.get_plugin("terminalreporter") + if terminal_reporter is None: + # terminal reporter is disabled e.g. by pytest-xdist. + return + + capture_manager = config.pluginmanager.get_plugin("capturemanager") + # if capturemanager plugin is disabled, live logging still works. + log_cli_handler = _LiveLoggingStreamHandler(terminal_reporter, capture_manager) + + log_cli_formatter = self._create_formatter( + get_option_ini(config, "log_cli_format", "log_format"), + get_option_ini(config, "log_cli_date_format", "log_date_format"), + ) + + log_cli_level = get_actual_log_level(config, "log_cli_level", "log_level") + self.log_cli_handler = log_cli_handler + self.live_logs_context = lambda: catching_logs( + log_cli_handler, formatter=log_cli_formatter, level=log_cli_level + ) + + def set_log_path(self, fname): + """Public method, which can set filename parameter for + Logging.FileHandler(). Also creates parent directory if + it does not exist. + + .. warning:: + Please considered as an experimental API. + """ + fname = Path(fname) + + if not fname.is_absolute(): + fname = Path(self._config.rootdir, fname) + + if not fname.parent.exists(): + fname.parent.mkdir(exist_ok=True, parents=True) + + self.log_file_handler = logging.FileHandler( + str(fname), mode="w", encoding="UTF-8" + ) + self.log_file_handler.setFormatter(self.log_file_formatter) + + def _log_cli_enabled(self): + """Return True if log_cli should be considered enabled, either explicitly + or because --log-cli-level was given in the command-line. 
+ """ + return self._config.getoption( + "--log-cli-level" + ) is not None or self._config.getini("log_cli") + + @pytest.hookimpl(hookwrapper=True, tryfirst=True) + def pytest_collection(self): + with self.live_logs_context(): + if self.log_cli_handler: + self.log_cli_handler.set_when("collection") + + if self.log_file_handler is not None: + with catching_logs(self.log_file_handler, level=self.log_file_level): + yield + else: + yield + + @contextmanager + def _runtest_for(self, item, when): + with self._runtest_for_main(item, when): + if self.log_file_handler is not None: + with catching_logs(self.log_file_handler, level=self.log_file_level): + yield + else: + yield + + @contextmanager + def _runtest_for_main(self, item, when): + """Implements the internals of pytest_runtest_xxx() hook.""" + with catching_logs( + LogCaptureHandler(), formatter=self.formatter, level=self.log_level + ) as log_handler: + if self.log_cli_handler: + self.log_cli_handler.set_when(when) + + if item is None: + yield # run the test + return + + if not hasattr(item, "catch_log_handlers"): + item.catch_log_handlers = {} + item.catch_log_handlers[when] = log_handler + item.catch_log_handler = log_handler + try: + yield # run test + finally: + if when == "teardown": + del item.catch_log_handler + del item.catch_log_handlers + + if self.print_logs: + # Add a captured log section to the report. + log = log_handler.stream.getvalue().strip() + item.add_report_section(when, "log", log) + + @pytest.hookimpl(hookwrapper=True) + def pytest_runtest_setup(self, item): + with self._runtest_for(item, "setup"): + yield + + @pytest.hookimpl(hookwrapper=True) + def pytest_runtest_call(self, item): + with self._runtest_for(item, "call"): + yield + + @pytest.hookimpl(hookwrapper=True) + def pytest_runtest_teardown(self, item): + with self._runtest_for(item, "teardown"): + yield + + @pytest.hookimpl(hookwrapper=True) + def pytest_runtest_logstart(self): + if self.log_cli_handler: + self.log_cli_handler.reset() + with self._runtest_for(None, "start"): + yield + + @pytest.hookimpl(hookwrapper=True) + def pytest_runtest_logfinish(self): + with self._runtest_for(None, "finish"): + yield + + @pytest.hookimpl(hookwrapper=True) + def pytest_runtest_logreport(self): + with self._runtest_for(None, "logreport"): + yield + + @pytest.hookimpl(hookwrapper=True, tryfirst=True) + def pytest_sessionfinish(self): + with self.live_logs_context(): + if self.log_cli_handler: + self.log_cli_handler.set_when("sessionfinish") + if self.log_file_handler is not None: + try: + with catching_logs( + self.log_file_handler, level=self.log_file_level + ): + yield + finally: + # Close the FileHandler explicitly. + # (logging.shutdown might have lost the weakref?!) 
+ self.log_file_handler.close() + else: + yield + + @pytest.hookimpl(hookwrapper=True, tryfirst=True) + def pytest_sessionstart(self): + with self.live_logs_context(): + if self.log_cli_handler: + self.log_cli_handler.set_when("sessionstart") + if self.log_file_handler is not None: + with catching_logs(self.log_file_handler, level=self.log_file_level): + yield + else: + yield + + @pytest.hookimpl(hookwrapper=True) + def pytest_runtestloop(self, session): + """Runs all collected test items.""" + + if session.config.option.collectonly: + yield + return + + if self._log_cli_enabled() and self._config.getoption("verbose") < 1: + # setting verbose flag is needed to avoid messy test progress output + self._config.option.verbose = 1 + + with self.live_logs_context(): + if self.log_file_handler is not None: + with catching_logs(self.log_file_handler, level=self.log_file_level): + yield # run all the tests + else: + yield # run all the tests + + +class _LiveLoggingStreamHandler(logging.StreamHandler): + """ + Custom StreamHandler used by the live logging feature: it will write a newline before the first log message + in each test. + + During live logging we must also explicitly disable stdout/stderr capturing otherwise it will get captured + and won't appear in the terminal. + """ + + def __init__(self, terminal_reporter, capture_manager): + """ + :param _pytest.terminal.TerminalReporter terminal_reporter: + :param _pytest.capture.CaptureManager capture_manager: + """ + logging.StreamHandler.__init__(self, stream=terminal_reporter) + self.capture_manager = capture_manager + self.reset() + self.set_when(None) + self._test_outcome_written = False + + def reset(self): + """Reset the handler; should be called before the start of each test""" + self._first_record_emitted = False + + def set_when(self, when): + """Prepares for the given test phase (setup/call/teardown)""" + self._when = when + self._section_name_shown = False + if when == "start": + self._test_outcome_written = False + + def emit(self, record): + ctx_manager = ( + self.capture_manager.global_and_fixture_disabled() + if self.capture_manager + else dummy_context_manager() + ) + with ctx_manager: + if not self._first_record_emitted: + self.stream.write("\n") + self._first_record_emitted = True + elif self._when in ("teardown", "finish"): + if not self._test_outcome_written: + self._test_outcome_written = True + self.stream.write("\n") + if not self._section_name_shown and self._when: + self.stream.section("live log " + self._when, sep="-", bold=True) + self._section_name_shown = True + logging.StreamHandler.emit(self, record) diff --git a/venv/lib/python2.7/site-packages/_pytest/main.py b/venv/lib/python2.7/site-packages/_pytest/main.py new file mode 100644 index 0000000..a9d310c --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/main.py @@ -0,0 +1,780 @@ +# -*- coding: utf-8 -*- +""" core implementation of testing process: init, session, runtest loop. 
""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import contextlib +import fnmatch +import functools +import os +import pkgutil +import sys +import warnings + +import attr +import py +import six + +import _pytest._code +from _pytest import nodes +from _pytest.config import directory_arg +from _pytest.config import hookimpl +from _pytest.config import UsageError +from _pytest.deprecated import PYTEST_CONFIG_GLOBAL +from _pytest.outcomes import exit +from _pytest.runner import collect_one_node + +# exitcodes for the command line +EXIT_OK = 0 +EXIT_TESTSFAILED = 1 +EXIT_INTERRUPTED = 2 +EXIT_INTERNALERROR = 3 +EXIT_USAGEERROR = 4 +EXIT_NOTESTSCOLLECTED = 5 + + +def pytest_addoption(parser): + parser.addini( + "norecursedirs", + "directory patterns to avoid for recursion", + type="args", + default=[".*", "build", "dist", "CVS", "_darcs", "{arch}", "*.egg", "venv"], + ) + parser.addini( + "testpaths", + "directories to search for tests when no files or directories are given in the " + "command line.", + type="args", + default=[], + ) + group = parser.getgroup("general", "running and selection options") + group._addoption( + "-x", + "--exitfirst", + action="store_const", + dest="maxfail", + const=1, + help="exit instantly on first error or failed test.", + ), + group._addoption( + "--maxfail", + metavar="num", + action="store", + type=int, + dest="maxfail", + default=0, + help="exit after first num failures or errors.", + ) + group._addoption( + "--strict-markers", + "--strict", + action="store_true", + help="markers not registered in the `markers` section of the configuration file raise errors.", + ) + group._addoption( + "-c", + metavar="file", + type=str, + dest="inifilename", + help="load configuration from `file` instead of trying to locate one of the implicit " + "configuration files.", + ) + group._addoption( + "--continue-on-collection-errors", + action="store_true", + default=False, + dest="continue_on_collection_errors", + help="Force test execution even if collection errors occur.", + ) + group._addoption( + "--rootdir", + action="store", + dest="rootdir", + help="Define root directory for tests. 
Can be relative path: 'root_dir', './root_dir', " + "'root_dir/another_dir/'; absolute path: '/home/user/root_dir'; path with variables: " + "'$HOME/root_dir'.", + ) + + group = parser.getgroup("collect", "collection") + group.addoption( + "--collectonly", + "--collect-only", + action="store_true", + help="only collect tests, don't execute them.", + ), + group.addoption( + "--pyargs", + action="store_true", + help="try to interpret all arguments as python packages.", + ) + group.addoption( + "--ignore", + action="append", + metavar="path", + help="ignore path during collection (multi-allowed).", + ) + group.addoption( + "--ignore-glob", + action="append", + metavar="path", + help="ignore path pattern during collection (multi-allowed).", + ) + group.addoption( + "--deselect", + action="append", + metavar="nodeid_prefix", + help="deselect item during collection (multi-allowed).", + ) + # when changing this to --conf-cut-dir, config.py Conftest.setinitial + # needs upgrading as well + group.addoption( + "--confcutdir", + dest="confcutdir", + default=None, + metavar="dir", + type=functools.partial(directory_arg, optname="--confcutdir"), + help="only load conftest.py's relative to specified dir.", + ) + group.addoption( + "--noconftest", + action="store_true", + dest="noconftest", + default=False, + help="Don't load any conftest.py files.", + ) + group.addoption( + "--keepduplicates", + "--keep-duplicates", + action="store_true", + dest="keepduplicates", + default=False, + help="Keep duplicate tests.", + ) + group.addoption( + "--collect-in-virtualenv", + action="store_true", + dest="collect_in_virtualenv", + default=False, + help="Don't ignore tests in a local virtualenv directory", + ) + + group = parser.getgroup("debugconfig", "test session debugging and configuration") + group.addoption( + "--basetemp", + dest="basetemp", + default=None, + metavar="dir", + help=( + "base temporary directory for this test run." 
+ "(warning: this directory is removed if it exists)" + ), + ) + + +class _ConfigDeprecated(object): + def __init__(self, config): + self.__dict__["_config"] = config + + def __getattr__(self, attr): + warnings.warn(PYTEST_CONFIG_GLOBAL, stacklevel=2) + return getattr(self._config, attr) + + def __setattr__(self, attr, val): + warnings.warn(PYTEST_CONFIG_GLOBAL, stacklevel=2) + return setattr(self._config, attr, val) + + def __repr__(self): + return "{}({!r})".format(type(self).__name__, self._config) + + +def pytest_configure(config): + __import__("pytest").config = _ConfigDeprecated(config) # compatibility + + +def wrap_session(config, doit): + """Skeleton command line program""" + session = Session(config) + session.exitstatus = EXIT_OK + initstate = 0 + try: + try: + config._do_configure() + initstate = 1 + config.hook.pytest_sessionstart(session=session) + initstate = 2 + session.exitstatus = doit(config, session) or 0 + except UsageError: + session.exitstatus = EXIT_USAGEERROR + raise + except Failed: + session.exitstatus = EXIT_TESTSFAILED + except (KeyboardInterrupt, exit.Exception): + excinfo = _pytest._code.ExceptionInfo.from_current() + exitstatus = EXIT_INTERRUPTED + if isinstance(excinfo.value, exit.Exception): + if excinfo.value.returncode is not None: + exitstatus = excinfo.value.returncode + if initstate < 2: + sys.stderr.write( + "{}: {}\n".format(excinfo.typename, excinfo.value.msg) + ) + config.hook.pytest_keyboard_interrupt(excinfo=excinfo) + session.exitstatus = exitstatus + except: # noqa + excinfo = _pytest._code.ExceptionInfo.from_current() + config.notify_exception(excinfo, config.option) + session.exitstatus = EXIT_INTERNALERROR + if excinfo.errisinstance(SystemExit): + sys.stderr.write("mainloop: caught unexpected SystemExit!\n") + + finally: + excinfo = None # Explicitly break reference cycle. + session.startdir.chdir() + if initstate >= 2: + config.hook.pytest_sessionfinish( + session=session, exitstatus=session.exitstatus + ) + config._ensure_unconfigure() + return session.exitstatus + + +def pytest_cmdline_main(config): + return wrap_session(config, _main) + + +def _main(config, session): + """ default command line protocol for initialization, session, + running tests and reporting. 
""" + config.hook.pytest_collection(session=session) + config.hook.pytest_runtestloop(session=session) + + if session.testsfailed: + return EXIT_TESTSFAILED + elif session.testscollected == 0: + return EXIT_NOTESTSCOLLECTED + + +def pytest_collection(session): + return session.perform_collect() + + +def pytest_runtestloop(session): + if session.testsfailed and not session.config.option.continue_on_collection_errors: + raise session.Interrupted("%d errors during collection" % session.testsfailed) + + if session.config.option.collectonly: + return True + + for i, item in enumerate(session.items): + nextitem = session.items[i + 1] if i + 1 < len(session.items) else None + item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) + if session.shouldfail: + raise session.Failed(session.shouldfail) + if session.shouldstop: + raise session.Interrupted(session.shouldstop) + return True + + +def _in_venv(path): + """Attempts to detect if ``path`` is the root of a Virtual Environment by + checking for the existence of the appropriate activate script""" + bindir = path.join("Scripts" if sys.platform.startswith("win") else "bin") + if not bindir.isdir(): + return False + activates = ( + "activate", + "activate.csh", + "activate.fish", + "Activate", + "Activate.bat", + "Activate.ps1", + ) + return any([fname.basename in activates for fname in bindir.listdir()]) + + +def pytest_ignore_collect(path, config): + ignore_paths = config._getconftest_pathlist("collect_ignore", path=path.dirpath()) + ignore_paths = ignore_paths or [] + excludeopt = config.getoption("ignore") + if excludeopt: + ignore_paths.extend([py.path.local(x) for x in excludeopt]) + + if py.path.local(path) in ignore_paths: + return True + + ignore_globs = config._getconftest_pathlist( + "collect_ignore_glob", path=path.dirpath() + ) + ignore_globs = ignore_globs or [] + excludeglobopt = config.getoption("ignore_glob") + if excludeglobopt: + ignore_globs.extend([py.path.local(x) for x in excludeglobopt]) + + if any( + fnmatch.fnmatch(six.text_type(path), six.text_type(glob)) + for glob in ignore_globs + ): + return True + + allow_in_venv = config.getoption("collect_in_virtualenv") + if not allow_in_venv and _in_venv(path): + return True + + return False + + +def pytest_collection_modifyitems(items, config): + deselect_prefixes = tuple(config.getoption("deselect") or []) + if not deselect_prefixes: + return + + remaining = [] + deselected = [] + for colitem in items: + if colitem.nodeid.startswith(deselect_prefixes): + deselected.append(colitem) + else: + remaining.append(colitem) + + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +@contextlib.contextmanager +def _patched_find_module(): + """Patch bug in pkgutil.ImpImporter.find_module + + When using pkgutil.find_loader on python<3.4 it removes symlinks + from the path due to a call to os.path.realpath. This is not consistent + with actually doing the import (in these versions, pkgutil and __import__ + did not share the same underlying code). This can break conftest + discovery for pytest where symlinks are involved. + + The only supported python<3.4 by pytest is python 2.7. 
+ """ + if six.PY2: # python 3.4+ uses importlib instead + + def find_module_patched(self, fullname, path=None): + # Note: we ignore 'path' argument since it is only used via meta_path + subname = fullname.split(".")[-1] + if subname != fullname and self.path is None: + return None + if self.path is None: + path = None + else: + # original: path = [os.path.realpath(self.path)] + path = [self.path] + try: + file, filename, etc = pkgutil.imp.find_module(subname, path) + except ImportError: + return None + return pkgutil.ImpLoader(fullname, file, filename, etc) + + old_find_module = pkgutil.ImpImporter.find_module + pkgutil.ImpImporter.find_module = find_module_patched + try: + yield + finally: + pkgutil.ImpImporter.find_module = old_find_module + else: + yield + + +class FSHookProxy(object): + def __init__(self, fspath, pm, remove_mods): + self.fspath = fspath + self.pm = pm + self.remove_mods = remove_mods + + def __getattr__(self, name): + x = self.pm.subset_hook_caller(name, remove_plugins=self.remove_mods) + self.__dict__[name] = x + return x + + +class NoMatch(Exception): + """ raised if matching cannot locate a matching names. """ + + +class Interrupted(KeyboardInterrupt): + """ signals an interrupted test run. """ + + __module__ = "builtins" # for py3 + + +class Failed(Exception): + """ signals a stop as failed test run. """ + + +@attr.s +class _bestrelpath_cache(dict): + path = attr.ib() + + def __missing__(self, path): + r = self.path.bestrelpath(path) + self[path] = r + return r + + +class Session(nodes.FSCollector): + Interrupted = Interrupted + Failed = Failed + + def __init__(self, config): + nodes.FSCollector.__init__( + self, config.rootdir, parent=None, config=config, session=self, nodeid="" + ) + self.testsfailed = 0 + self.testscollected = 0 + self.shouldstop = False + self.shouldfail = False + self.trace = config.trace.root.get("collection") + self._norecursepatterns = config.getini("norecursedirs") + self.startdir = config.invocation_dir + self._initialpaths = frozenset() + # Keep track of any collected nodes in here, so we don't duplicate fixtures + self._node_cache = {} + self._bestrelpathcache = _bestrelpath_cache(config.rootdir) + # Dirnames of pkgs with dunder-init files. 
+ self._pkg_roots = {} + + self.config.pluginmanager.register(self, name="session") + + def __repr__(self): + return "<%s %s exitstatus=%r testsfailed=%d testscollected=%d>" % ( + self.__class__.__name__, + self.name, + getattr(self, "exitstatus", ""), + self.testsfailed, + self.testscollected, + ) + + def _node_location_to_relpath(self, node_path): + # bestrelpath is a quite slow function + return self._bestrelpathcache[node_path] + + @hookimpl(tryfirst=True) + def pytest_collectstart(self): + if self.shouldfail: + raise self.Failed(self.shouldfail) + if self.shouldstop: + raise self.Interrupted(self.shouldstop) + + @hookimpl(tryfirst=True) + def pytest_runtest_logreport(self, report): + if report.failed and not hasattr(report, "wasxfail"): + self.testsfailed += 1 + maxfail = self.config.getvalue("maxfail") + if maxfail and self.testsfailed >= maxfail: + self.shouldfail = "stopping after %d failures" % (self.testsfailed) + + pytest_collectreport = pytest_runtest_logreport + + def isinitpath(self, path): + return path in self._initialpaths + + def gethookproxy(self, fspath): + # check if we have the common case of running + # hooks with all conftest.py files + pm = self.config.pluginmanager + my_conftestmodules = pm._getconftestmodules(fspath) + remove_mods = pm._conftest_plugins.difference(my_conftestmodules) + if remove_mods: + # one or more conftests are not in use at this fspath + proxy = FSHookProxy(fspath, pm, remove_mods) + else: + # all plugis are active for this fspath + proxy = self.config.hook + return proxy + + def perform_collect(self, args=None, genitems=True): + hook = self.config.hook + try: + items = self._perform_collect(args, genitems) + self.config.pluginmanager.check_pending() + hook.pytest_collection_modifyitems( + session=self, config=self.config, items=items + ) + finally: + hook.pytest_collection_finish(session=self) + self.testscollected = len(items) + return items + + def _perform_collect(self, args, genitems): + if args is None: + args = self.config.args + self.trace("perform_collect", self, args) + self.trace.root.indent += 1 + self._notfound = [] + initialpaths = [] + self._initialparts = [] + self.items = items = [] + for arg in args: + parts = self._parsearg(arg) + self._initialparts.append(parts) + initialpaths.append(parts[0]) + self._initialpaths = frozenset(initialpaths) + rep = collect_one_node(self) + self.ihook.pytest_collectreport(report=rep) + self.trace.root.indent -= 1 + if self._notfound: + errors = [] + for arg, exc in self._notfound: + line = "(no name %r in any of %r)" % (arg, exc.args[0]) + errors.append("not found: %s\n%s" % (arg, line)) + # XXX: test this + raise UsageError(*errors) + if not genitems: + return rep.result + else: + if rep.passed: + for node in rep.result: + self.items.extend(self.genitems(node)) + return items + + def collect(self): + for initialpart in self._initialparts: + arg = "::".join(map(str, initialpart)) + self.trace("processing argument", arg) + self.trace.root.indent += 1 + try: + for x in self._collect(arg): + yield x + except NoMatch: + # we are inside a make_report hook so + # we cannot directly pass through the exception + self._notfound.append((arg, sys.exc_info()[1])) + + self.trace.root.indent -= 1 + + def _collect(self, arg): + from _pytest.python import Package + + names = self._parsearg(arg) + argpath = names.pop(0) + + # Start with a Session root, and delve to argpath item (dir or file) + # and stack all Packages found on the way. 
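A hedged sketch that drives only the collection phase described above, using a tiny throwaway plugin; the class name is made up:

    import pytest

    class PrintCollected(object):
        def pytest_collection_finish(self, session):
            for item in session.items:
                print(item.nodeid)

    pytest.main(["--collect-only", "-q"], plugins=[PrintCollected()])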
+ # No point in finding packages when collecting doctests + if not self.config.getoption("doctestmodules", False): + pm = self.config.pluginmanager + for parent in reversed(argpath.parts()): + if pm._confcutdir and pm._confcutdir.relto(parent): + break + + if parent.isdir(): + pkginit = parent.join("__init__.py") + if pkginit.isfile(): + if pkginit not in self._node_cache: + col = self._collectfile(pkginit, handle_dupes=False) + if col: + if isinstance(col[0], Package): + self._pkg_roots[parent] = col[0] + # always store a list in the cache, matchnodes expects it + self._node_cache[col[0].fspath] = [col[0]] + + # If it's a directory argument, recurse and look for any Subpackages. + # Let the Package collector deal with subnodes, don't collect here. + if argpath.check(dir=1): + assert not names, "invalid arg %r" % (arg,) + + seen_dirs = set() + for path in argpath.visit( + fil=self._visit_filter, rec=self._recurse, bf=True, sort=True + ): + dirpath = path.dirpath() + if dirpath not in seen_dirs: + # Collect packages first. + seen_dirs.add(dirpath) + pkginit = dirpath.join("__init__.py") + if pkginit.exists(): + for x in self._collectfile(pkginit): + yield x + if isinstance(x, Package): + self._pkg_roots[dirpath] = x + if dirpath in self._pkg_roots: + # Do not collect packages here. + continue + + for x in self._collectfile(path): + key = (type(x), x.fspath) + if key in self._node_cache: + yield self._node_cache[key] + else: + self._node_cache[key] = x + yield x + else: + assert argpath.check(file=1) + + if argpath in self._node_cache: + col = self._node_cache[argpath] + else: + collect_root = self._pkg_roots.get(argpath.dirname, self) + col = collect_root._collectfile(argpath, handle_dupes=False) + if col: + self._node_cache[argpath] = col + m = self.matchnodes(col, names) + # If __init__.py was the only file requested, then the matched node will be + # the corresponding Package, and the first yielded item will be the __init__ + # Module itself, so just use that. If this special case isn't taken, then all + # the files in the package will be yielded. + if argpath.basename == "__init__.py": + try: + yield next(m[0].collect()) + except StopIteration: + # The package collects nothing with only an __init__.py + # file in it, which gets ignored by the default + # "python_files" option. 
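+                    # (illustrative case: running pytest on pkg/__init__.py
+                    # alone, where the package defines no tests of its own)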
+ pass + return + for y in m: + yield y + + def _collectfile(self, path, handle_dupes=True): + assert path.isfile(), "%r is not a file (isdir=%r, exists=%r, islink=%r)" % ( + path, + path.isdir(), + path.exists(), + path.islink(), + ) + ihook = self.gethookproxy(path) + if not self.isinitpath(path): + if ihook.pytest_ignore_collect(path=path, config=self.config): + return () + + if handle_dupes: + keepduplicates = self.config.getoption("keepduplicates") + if not keepduplicates: + duplicate_paths = self.config.pluginmanager._duplicatepaths + if path in duplicate_paths: + return () + else: + duplicate_paths.add(path) + + return ihook.pytest_collect_file(path=path, parent=self) + + def _recurse(self, dirpath): + if dirpath.basename == "__pycache__": + return False + ihook = self.gethookproxy(dirpath.dirpath()) + if ihook.pytest_ignore_collect(path=dirpath, config=self.config): + return False + for pat in self._norecursepatterns: + if dirpath.check(fnmatch=pat): + return False + ihook = self.gethookproxy(dirpath) + ihook.pytest_collect_directory(path=dirpath, parent=self) + return True + + if six.PY2: + + @staticmethod + def _visit_filter(f): + return f.check(file=1) and not f.strpath.endswith("*.pyc") + + else: + + @staticmethod + def _visit_filter(f): + return f.check(file=1) + + def _tryconvertpyarg(self, x): + """Convert a dotted module name to path.""" + try: + with _patched_find_module(): + loader = pkgutil.find_loader(x) + except ImportError: + return x + if loader is None: + return x + # This method is sometimes invoked when AssertionRewritingHook, which + # does not define a get_filename method, is already in place: + try: + with _patched_find_module(): + path = loader.get_filename(x) + except AttributeError: + # Retrieve path from AssertionRewritingHook: + path = loader.modules[x][0].co_filename + if loader.is_package(x): + path = os.path.dirname(path) + return path + + def _parsearg(self, arg): + """ return (fspath, names) tuple after checking the file exists. 
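+
+        A sketch with illustrative names: "tests/test_mod.py::TestCls::test_fn"
+        becomes [local(".../tests/test_mod.py"), "TestCls", "test_fn"].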
""" + parts = str(arg).split("::") + if self.config.option.pyargs: + parts[0] = self._tryconvertpyarg(parts[0]) + relpath = parts[0].replace("/", os.sep) + path = self.config.invocation_dir.join(relpath, abs=True) + if not path.check(): + if self.config.option.pyargs: + raise UsageError( + "file or package not found: " + arg + " (missing __init__.py?)" + ) + raise UsageError("file not found: " + arg) + parts[0] = path.realpath() + return parts + + def matchnodes(self, matching, names): + self.trace("matchnodes", matching, names) + self.trace.root.indent += 1 + nodes = self._matchnodes(matching, names) + num = len(nodes) + self.trace("matchnodes finished -> ", num, "nodes") + self.trace.root.indent -= 1 + if num == 0: + raise NoMatch(matching, names[:1]) + return nodes + + def _matchnodes(self, matching, names): + if not matching or not names: + return matching + name = names[0] + assert name + nextnames = names[1:] + resultnodes = [] + for node in matching: + if isinstance(node, nodes.Item): + if not names: + resultnodes.append(node) + continue + assert isinstance(node, nodes.Collector) + key = (type(node), node.nodeid) + if key in self._node_cache: + rep = self._node_cache[key] + else: + rep = collect_one_node(node) + self._node_cache[key] = rep + if rep.passed: + has_matched = False + for x in rep.result: + # TODO: remove parametrized workaround once collection structure contains parametrization + if x.name == name or x.name.split("[")[0] == name: + resultnodes.extend(self.matchnodes([x], nextnames)) + has_matched = True + # XXX accept IDs that don't have "()" for class instances + if not has_matched and len(rep.result) == 1 and x.name == "()": + nextnames.insert(0, name) + resultnodes.extend(self.matchnodes([x], nextnames)) + else: + # report collection failures here to avoid failing to run some test + # specified in the command line because the module could not be + # imported (#134) + node.ihook.pytest_collectreport(report=rep) + return resultnodes + + def genitems(self, node): + self.trace("genitems", node) + if isinstance(node, nodes.Item): + node.ihook.pytest_itemcollected(item=node) + yield node + else: + assert isinstance(node, nodes.Collector) + rep = collect_one_node(node) + if rep.passed: + for subnode in rep.result: + for x in self.genitems(subnode): + yield x + node.ihook.pytest_collectreport(report=rep) diff --git a/venv/lib/python2.7/site-packages/_pytest/mark/__init__.py b/venv/lib/python2.7/site-packages/_pytest/mark/__init__.py new file mode 100644 index 0000000..6bc22fe --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/mark/__init__.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +""" generic mechanism for marking and selecting python functions. """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from .legacy import matchkeyword +from .legacy import matchmark +from .structures import EMPTY_PARAMETERSET_OPTION +from .structures import get_empty_parameterset_mark +from .structures import Mark +from .structures import MARK_GEN +from .structures import MarkDecorator +from .structures import MarkGenerator +from .structures import ParameterSet +from _pytest.config import UsageError + +__all__ = ["Mark", "MarkDecorator", "MarkGenerator", "get_empty_parameterset_mark"] + + +def param(*values, **kw): + """Specify a parameter in `pytest.mark.parametrize`_ calls or + :ref:`parametrized fixtures `. + + .. 
code-block:: python + + @pytest.mark.parametrize("test_input,expected", [ + ("3+5", 8), + pytest.param("6*9", 42, marks=pytest.mark.xfail), + ]) + def test_eval(test_input, expected): + assert eval(test_input) == expected + + :param values: variable args of the values of the parameter set, in order. + :keyword marks: a single mark or a list of marks to be applied to this parameter set. + :keyword str id: the id to attribute to this parameter set. + """ + return ParameterSet.param(*values, **kw) + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group._addoption( + "-k", + action="store", + dest="keyword", + default="", + metavar="EXPRESSION", + help="only run tests which match the given substring expression. " + "An expression is a python evaluatable expression " + "where all names are substring-matched against test names " + "and their parent classes. Example: -k 'test_method or test_" + "other' matches all test functions and classes whose name " + "contains 'test_method' or 'test_other', while -k 'not test_method' " + "matches those that don't contain 'test_method' in their names. " + "-k 'not test_method and not test_other' will eliminate the matches. " + "Additionally keywords are matched to classes and functions " + "containing extra names in their 'extra_keyword_matches' set, " + "as well as functions which have names assigned directly to them.", + ) + + group._addoption( + "-m", + action="store", + dest="markexpr", + default="", + metavar="MARKEXPR", + help="only run tests matching given mark expression. " + "example: -m 'mark1 and not mark2'.", + ) + + group.addoption( + "--markers", + action="store_true", + help="show markers (builtin, plugin and per-project ones).", + ) + + parser.addini("markers", "markers for test functions", "linelist") + parser.addini(EMPTY_PARAMETERSET_OPTION, "default marker for empty parametersets") + + +def pytest_cmdline_main(config): + import _pytest.config + + if config.option.markers: + config._do_configure() + tw = _pytest.config.create_terminal_writer(config) + for line in config.getini("markers"): + parts = line.split(":", 1) + name = parts[0] + rest = parts[1] if len(parts) == 2 else "" + tw.write("@pytest.mark.%s:" % name, bold=True) + tw.line(rest) + tw.line() + config._ensure_unconfigure() + return 0 + + +pytest_cmdline_main.tryfirst = True + + +def deselect_by_keyword(items, config): + keywordexpr = config.option.keyword.lstrip() + if not keywordexpr: + return + + if keywordexpr.startswith("-"): + keywordexpr = "not " + keywordexpr[1:] + selectuntil = False + if keywordexpr[-1:] == ":": + selectuntil = True + keywordexpr = keywordexpr[:-1] + + remaining = [] + deselected = [] + for colitem in items: + if keywordexpr and not matchkeyword(colitem, keywordexpr): + deselected.append(colitem) + else: + if selectuntil: + keywordexpr = None + remaining.append(colitem) + + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +def deselect_by_mark(items, config): + matchexpr = config.option.markexpr + if not matchexpr: + return + + remaining = [] + deselected = [] + for item in items: + if matchmark(item, matchexpr): + remaining.append(item) + else: + deselected.append(item) + + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +def pytest_collection_modifyitems(items, config): + deselect_by_keyword(items, config) + deselect_by_mark(items, config) + + +def pytest_configure(config): + config._old_mark_config = MARK_GEN._config + MARK_GEN._config = 
config + + empty_parameterset = config.getini(EMPTY_PARAMETERSET_OPTION) + + if empty_parameterset not in ("skip", "xfail", "fail_at_collect", None, ""): + raise UsageError( + "{!s} must be one of skip, xfail or fail_at_collect" + " but it is {!r}".format(EMPTY_PARAMETERSET_OPTION, empty_parameterset) + ) + + +def pytest_unconfigure(config): + MARK_GEN._config = getattr(config, "_old_mark_config", None) diff --git a/venv/lib/python2.7/site-packages/_pytest/mark/evaluate.py b/venv/lib/python2.7/site-packages/_pytest/mark/evaluate.py new file mode 100644 index 0000000..506546e --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/mark/evaluate.py @@ -0,0 +1,126 @@ +# -*- coding: utf-8 -*- +import os +import platform +import sys +import traceback + +import six + +from ..outcomes import fail +from ..outcomes import TEST_OUTCOME + + +def cached_eval(config, expr, d): + if not hasattr(config, "_evalcache"): + config._evalcache = {} + try: + return config._evalcache[expr] + except KeyError: + import _pytest._code + + exprcode = _pytest._code.compile(expr, mode="eval") + config._evalcache[expr] = x = eval(exprcode, d) + return x + + +class MarkEvaluator(object): + def __init__(self, item, name): + self.item = item + self._marks = None + self._mark = None + self._mark_name = name + + def __bool__(self): + # dont cache here to prevent staleness + return bool(self._get_marks()) + + __nonzero__ = __bool__ + + def wasvalid(self): + return not hasattr(self, "exc") + + def _get_marks(self): + return list(self.item.iter_markers(name=self._mark_name)) + + def invalidraise(self, exc): + raises = self.get("raises") + if not raises: + return + return not isinstance(exc, raises) + + def istrue(self): + try: + return self._istrue() + except TEST_OUTCOME: + self.exc = sys.exc_info() + if isinstance(self.exc[1], SyntaxError): + msg = [" " * (self.exc[1].offset + 4) + "^"] + msg.append("SyntaxError: invalid syntax") + else: + msg = traceback.format_exception_only(*self.exc[:2]) + fail( + "Error evaluating %r expression\n" + " %s\n" + "%s" % (self._mark_name, self.expr, "\n".join(msg)), + pytrace=False, + ) + + def _getglobals(self): + d = {"os": os, "sys": sys, "platform": platform, "config": self.item.config} + if hasattr(self.item, "obj"): + d.update(self.item.obj.__globals__) + return d + + def _istrue(self): + if hasattr(self, "result"): + return self.result + self._marks = self._get_marks() + + if self._marks: + self.result = False + for mark in self._marks: + self._mark = mark + if "condition" in mark.kwargs: + args = (mark.kwargs["condition"],) + else: + args = mark.args + + for expr in args: + self.expr = expr + if isinstance(expr, six.string_types): + d = self._getglobals() + result = cached_eval(self.item.config, expr, d) + else: + if "reason" not in mark.kwargs: + # XXX better be checked at collection time + msg = ( + "you need to specify reason=STRING " + "when using booleans as conditions." 
+                            )
+                            fail(msg)
+                        result = bool(expr)
+                    if result:
+                        self.result = True
+                        self.reason = mark.kwargs.get("reason", None)
+                        self.expr = expr
+                        return self.result
+
+                if not args:
+                    self.result = True
+                    self.reason = mark.kwargs.get("reason", None)
+                    return self.result
+        return False
+
+    def get(self, attr, default=None):
+        if self._mark is None:
+            return default
+        return self._mark.kwargs.get(attr, default)
+
+    def getexplanation(self):
+        expl = getattr(self, "reason", None) or self.get("reason", None)
+        if not expl:
+            if not hasattr(self, "expr"):
+                return ""
+            else:
+                return "condition: " + str(self.expr)
+        return expl
diff --git a/venv/lib/python2.7/site-packages/_pytest/mark/legacy.py b/venv/lib/python2.7/site-packages/_pytest/mark/legacy.py
new file mode 100644
index 0000000..c56482f
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/_pytest/mark/legacy.py
@@ -0,0 +1,103 @@
+# -*- coding: utf-8 -*-
+"""
+this is a place where we put data structures used by legacy APIs
+that we hope to remove
+"""
+import keyword
+
+import attr
+
+from _pytest.config import UsageError
+
+
+@attr.s
+class MarkMapping(object):
+    """Provides a local mapping for markers where item access
+    resolves to True if the marker is present. """
+
+    own_mark_names = attr.ib()
+
+    @classmethod
+    def from_item(cls, item):
+        mark_names = {mark.name for mark in item.iter_markers()}
+        return cls(mark_names)
+
+    def __getitem__(self, name):
+        return name in self.own_mark_names
+
+
+class KeywordMapping(object):
+    """Provides a local mapping for keywords.
+    Given a list of names, map any substring of one of these names to True.
+    """
+
+    def __init__(self, names):
+        self._names = names
+
+    @classmethod
+    def from_item(cls, item):
+        mapped_names = set()
+
+        # Add the names of the current item and any parent items
+        import pytest
+
+        for item in item.listchain():
+            if not isinstance(item, pytest.Instance):
+                mapped_names.add(item.name)
+
+        # Add the names added as extra keywords to current or parent items
+        mapped_names.update(item.listextrakeywords())
+
+        # Add the names attached to the current function through direct assignment
+        if hasattr(item, "function"):
+            mapped_names.update(item.function.__dict__)
+
+        # add the markers to the keywords as we no longer handle them correctly
+        mapped_names.update(mark.name for mark in item.iter_markers())
+
+        return cls(mapped_names)
+
+    def __getitem__(self, subname):
+        for name in self._names:
+            if subname in name:
+                return True
+        return False
+
+
+python_keywords_allowed_list = ["or", "and", "not"]
+
+
+def matchmark(colitem, markexpr):
+    """Tries to match on any marker names attached to the given colitem."""
+    try:
+        return eval(markexpr, {}, MarkMapping.from_item(colitem))
+    except SyntaxError as e:
+        raise SyntaxError(str(e) + "\nMarker expression must be valid Python!")
+
+
+def matchkeyword(colitem, keywordexpr):
+    """Tries to match the given keyword expression to the given collector item.
+
+    Will match on the name of colitem, including the names of its parents.
+    Only matches names of items which are either a :class:`Class` or a
+    :class:`Function`.
+    Additionally, matches on names in the 'extra_keyword_matches' set of
+    any item, as well as names directly assigned to test functions.
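+
+    A sketch of the expressions this handles (test names illustrative)::
+
+        -k test_method                    # simple substring match
+        -k "not test_method"              # negated single name
+        -k "test_method or test_other"    # full python expression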
+    """
+    mapping = KeywordMapping.from_item(colitem)
+    if " " not in keywordexpr:
+        # special case to allow for simple "-k pass" and "-k 1.3"
+        return mapping[keywordexpr]
+    elif keywordexpr.startswith("not ") and " " not in keywordexpr[4:]:
+        return not mapping[keywordexpr[4:]]
+    for kwd in keywordexpr.split():
+        if keyword.iskeyword(kwd) and kwd not in python_keywords_allowed_list:
+            raise UsageError(
+                "Python keyword '{}' not accepted in expressions passed to '-k'".format(
+                    kwd
+                )
+            )
+    try:
+        return eval(keywordexpr, {}, mapping)
+    except SyntaxError:
+        raise UsageError("Wrong expression passed to '-k': {}".format(keywordexpr))
diff --git a/venv/lib/python2.7/site-packages/_pytest/mark/structures.py b/venv/lib/python2.7/site-packages/_pytest/mark/structures.py
new file mode 100644
index 0000000..0ccd814
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/_pytest/mark/structures.py
@@ -0,0 +1,410 @@
+# -*- coding: utf-8 -*-
+import inspect
+import warnings
+from collections import namedtuple
+from operator import attrgetter
+
+import attr
+import six
+
+from ..compat import ascii_escaped
+from ..compat import ATTRS_EQ_FIELD
+from ..compat import getfslineno
+from ..compat import MappingMixin
+from ..compat import NOTSET
+from _pytest.deprecated import PYTEST_PARAM_UNKNOWN_KWARGS
+from _pytest.outcomes import fail
+from _pytest.warning_types import PytestUnknownMarkWarning
+
+EMPTY_PARAMETERSET_OPTION = "empty_parameter_set_mark"
+
+
+def alias(name, warning=None):
+    getter = attrgetter(name)
+
+    def warned(self):
+        warnings.warn(warning, stacklevel=2)
+        return getter(self)
+
+    return property(getter if warning is None else warned, doc="alias for " + name)
+
+
+def istestfunc(func):
+    return (
+        hasattr(func, "__call__")
+        and getattr(func, "__name__", "<lambda>") != "<lambda>"
+    )
+
+
+def get_empty_parameterset_mark(config, argnames, func):
+    from ..nodes import Collector
+
+    requested_mark = config.getini(EMPTY_PARAMETERSET_OPTION)
+    if requested_mark in ("", None, "skip"):
+        mark = MARK_GEN.skip
+    elif requested_mark == "xfail":
+        mark = MARK_GEN.xfail(run=False)
+    elif requested_mark == "fail_at_collect":
+        f_name = func.__name__
+        _, lineno = getfslineno(func)
+        raise Collector.CollectError(
+            "Empty parameter set in '%s' at line %d" % (f_name, lineno + 1)
+        )
+    else:
+        raise LookupError(requested_mark)
+    fs, lineno = getfslineno(func)
+    reason = "got empty parameter set %r, function %s at %s:%d" % (
+        argnames,
+        func.__name__,
+        fs,
+        lineno,
+    )
+    return mark(reason=reason)
+
+
+class ParameterSet(namedtuple("ParameterSet", "values, marks, id")):
+    @classmethod
+    def param(cls, *values, **kwargs):
+        marks = kwargs.pop("marks", ())
+        if isinstance(marks, MarkDecorator):
+            marks = (marks,)
+        else:
+            assert isinstance(marks, (tuple, list, set))
+
+        id_ = kwargs.pop("id", None)
+        if id_ is not None:
+            if not isinstance(id_, six.string_types):
+                raise TypeError(
+                    "Expected id to be a string, got {}: {!r}".format(type(id_), id_)
+                )
+            id_ = ascii_escaped(id_)
+
+        if kwargs:
+            warnings.warn(
+                PYTEST_PARAM_UNKNOWN_KWARGS.format(args=sorted(kwargs)), stacklevel=3
+            )
+        return cls(values, marks, id_)
+
+    @classmethod
+    def extract_from(cls, parameterset, force_tuple=False):
+        """
+        :param parameterset:
+            a legacy style parameterset that may or may not be a tuple,
+            and may or may not be wrapped into a mess of mark objects
+
+        :param force_tuple:
+            enforce tuple wrapping so single argument tuple values
+            don't get decomposed and break tests
+        """
+
+        if isinstance(parameterset, cls):
+            return
parameterset + if force_tuple: + return cls.param(parameterset) + else: + return cls(parameterset, marks=[], id=None) + + @staticmethod + def _parse_parametrize_args(argnames, argvalues, *args, **kwargs): + if not isinstance(argnames, (tuple, list)): + argnames = [x.strip() for x in argnames.split(",") if x.strip()] + force_tuple = len(argnames) == 1 + else: + force_tuple = False + return argnames, force_tuple + + @staticmethod + def _parse_parametrize_parameters(argvalues, force_tuple): + return [ + ParameterSet.extract_from(x, force_tuple=force_tuple) for x in argvalues + ] + + @classmethod + def _for_parametrize(cls, argnames, argvalues, func, config, function_definition): + argnames, force_tuple = cls._parse_parametrize_args(argnames, argvalues) + parameters = cls._parse_parametrize_parameters(argvalues, force_tuple) + del argvalues + + if parameters: + # check all parameter sets have the correct number of values + for param in parameters: + if len(param.values) != len(argnames): + msg = ( + '{nodeid}: in "parametrize" the number of names ({names_len}):\n' + " {names}\n" + "must be equal to the number of values ({values_len}):\n" + " {values}" + ) + fail( + msg.format( + nodeid=function_definition.nodeid, + values=param.values, + names=argnames, + names_len=len(argnames), + values_len=len(param.values), + ), + pytrace=False, + ) + else: + # empty parameter set (likely computed at runtime): create a single + # parameter set with NOTSET values, with the "empty parameter set" mark applied to it + mark = get_empty_parameterset_mark(config, argnames, func) + parameters.append( + ParameterSet(values=(NOTSET,) * len(argnames), marks=[mark], id=None) + ) + return argnames, parameters + + +@attr.s(frozen=True) +class Mark(object): + #: name of the mark + name = attr.ib(type=str) + #: positional arguments of the mark decorator + args = attr.ib() # List[object] + #: keyword arguments of the mark decorator + kwargs = attr.ib() # Dict[str, object] + + def combined_with(self, other): + """ + :param other: the mark to combine with + :type other: Mark + :rtype: Mark + + combines by appending args and merging the mappings + """ + assert self.name == other.name + return Mark( + self.name, self.args + other.args, dict(self.kwargs, **other.kwargs) + ) + + +@attr.s +class MarkDecorator(object): + """ A decorator for test functions and test classes. When applied + it will create :class:`MarkInfo` objects which may be + :ref:`retrieved by hooks as item keywords `. + MarkDecorator instances are often created like this:: + + mark1 = pytest.mark.NAME # simple MarkDecorator + mark2 = pytest.mark.NAME(name1=value) # parametrized MarkDecorator + + and can then be applied as decorators to test functions:: + + @mark2 + def test_function(): + pass + + When a MarkDecorator instance is called it does the following: + 1. If called with a single class as its only positional argument and no + additional keyword arguments, it attaches itself to the class so it + gets applied automatically to all test cases found in that class. + 2. If called with a single function as its only positional argument and + no additional keyword arguments, it attaches a MarkInfo object to the + function, containing all the arguments already stored internally in + the MarkDecorator. + 3. When called in any other case, it performs a 'fake construction' call, + i.e. it returns a new MarkDecorator instance with the original + MarkDecorator's content updated with the arguments passed to this + call. 
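+
+    A sketch of the third case (``NAME``, ``name1`` and ``name2`` are
+    illustrative)::
+
+        mark3 = pytest.mark.NAME(name1=value)(name2=other)
+        # a new MarkDecorator whose kwargs merge both calls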
+
+    Note: The rules above prevent MarkDecorator objects from storing only a
+    single function or class reference as their positional argument with no
+    additional keyword or positional arguments.
+
+    """
+
+    mark = attr.ib(validator=attr.validators.instance_of(Mark))
+
+    name = alias("mark.name")
+    args = alias("mark.args")
+    kwargs = alias("mark.kwargs")
+
+    @property
+    def markname(self):
+        return self.name  # for backward-compat (2.4.1 had this attr)
+
+    def __eq__(self, other):
+        return self.mark == other.mark if isinstance(other, MarkDecorator) else False
+
+    def __repr__(self):
+        return "<MarkDecorator %r>" % (self.mark,)
+
+    def with_args(self, *args, **kwargs):
+        """ return a MarkDecorator with extra arguments added
+
+        unlike call this can be used even if the sole argument is a callable/class
+
+        :return: MarkDecorator
+        """
+
+        mark = Mark(self.name, args, kwargs)
+        return self.__class__(self.mark.combined_with(mark))
+
+    def __call__(self, *args, **kwargs):
+        """ if passed a single callable argument: decorate it with mark info.
+            otherwise add *args/**kwargs in-place to mark information. """
+        if args and not kwargs:
+            func = args[0]
+            is_class = inspect.isclass(func)
+            if len(args) == 1 and (istestfunc(func) or is_class):
+                store_mark(func, self.mark)
+                return func
+        return self.with_args(*args, **kwargs)
+
+
+def get_unpacked_marks(obj):
+    """
+    obtain the unpacked marks that are stored on an object
+    """
+    mark_list = getattr(obj, "pytestmark", [])
+    if not isinstance(mark_list, list):
+        mark_list = [mark_list]
+    return normalize_mark_list(mark_list)
+
+
+def normalize_mark_list(mark_list):
+    """
+    normalizes marker decorating helpers to mark objects
+
+    :type mark_list: List[Union[Mark, MarkDecorator]]
+    :rtype: List[Mark]
+    """
+    extracted = [
+        getattr(mark, "mark", mark) for mark in mark_list
+    ]  # unpack MarkDecorator
+    for mark in extracted:
+        if not isinstance(mark, Mark):
+            raise TypeError("got {!r} instead of Mark".format(mark))
+    return [x for x in extracted if isinstance(x, Mark)]
+
+
+def store_mark(obj, mark):
+    """store a Mark on an object
+    this is used to implement the Mark declarations/decorators correctly
+    """
+    assert isinstance(mark, Mark), mark
+    # always reassign name to avoid updating pytestmark
+    # in a reference that was only borrowed
+    obj.pytestmark = get_unpacked_marks(obj) + [mark]
+
+
+class MarkGenerator(object):
+    """ Factory for :class:`MarkDecorator` objects - exposed as
+    a ``pytest.mark`` singleton instance. Example::
+
+         import pytest
+         @pytest.mark.slowtest
+         def test_function():
+             pass
+
+    will set a 'slowtest' :class:`MarkInfo` object
+    on the ``test_function`` object. """
+
+    _config = None
+    _markers = set()
+
+    def __getattr__(self, name):
+        if name[0] == "_":
+            raise AttributeError("Marker name must NOT start with underscore")
+
+        if self._config is not None:
+            # We store a set of markers as a performance optimisation - if a mark
+            # name is in the set we definitely know it, but a mark may be known and
+            # not in the set. We therefore start by updating the set!
+            if name not in self._markers:
+                for line in self._config.getini("markers"):
+                    # example lines: "skipif(condition): skip the given test if..."
+                    # or "hypothesis: tests which use Hypothesis", so to get the
+                    # marker name we split on both `:` and `(`.
+                    marker = line.split(":")[0].split("(")[0].strip()
+                    self._markers.add(marker)
+
+            # If the name is not in the set of known marks after updating,
+            # then it really is time to issue a warning or an error.
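+            # (a hard error under --strict-markers, otherwise a
+            # PytestUnknownMarkWarning; see the branches just below)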
+ if name not in self._markers: + if self._config.option.strict_markers: + fail( + "{!r} not found in `markers` configuration option".format(name), + pytrace=False, + ) + else: + warnings.warn( + "Unknown pytest.mark.%s - is this a typo? You can register " + "custom marks to avoid this warning - for details, see " + "https://docs.pytest.org/en/latest/mark.html" % name, + PytestUnknownMarkWarning, + ) + + return MarkDecorator(Mark(name, (), {})) + + +MARK_GEN = MarkGenerator() + + +class NodeKeywords(MappingMixin): + def __init__(self, node): + self.node = node + self.parent = node.parent + self._markers = {node.name: True} + + def __getitem__(self, key): + try: + return self._markers[key] + except KeyError: + if self.parent is None: + raise + return self.parent.keywords[key] + + def __setitem__(self, key, value): + self._markers[key] = value + + def __delitem__(self, key): + raise ValueError("cannot delete key in keywords dict") + + def __iter__(self): + seen = self._seen() + return iter(seen) + + def _seen(self): + seen = set(self._markers) + if self.parent is not None: + seen.update(self.parent.keywords) + return seen + + def __len__(self): + return len(self._seen()) + + def __repr__(self): + return "" % (self.node,) + + +# mypy cannot find this overload, remove when on attrs>=19.2 +@attr.s(hash=False, **{ATTRS_EQ_FIELD: False}) # type: ignore +class NodeMarkers(object): + """ + internal structure for storing marks belonging to a node + + ..warning:: + + unstable api + + """ + + own_markers = attr.ib(default=attr.Factory(list)) + + def update(self, add_markers): + """update the own markers + """ + self.own_markers.extend(add_markers) + + def find(self, name): + """ + find markers in own nodes or parent nodes + needs a better place + """ + for mark in self.own_markers: + if mark.name == name: + yield mark + + def __iter__(self): + return iter(self.own_markers) diff --git a/venv/lib/python2.7/site-packages/_pytest/monkeypatch.py b/venv/lib/python2.7/site-packages/_pytest/monkeypatch.py new file mode 100644 index 0000000..e8671b0 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/monkeypatch.py @@ -0,0 +1,336 @@ +# -*- coding: utf-8 -*- +""" monkeypatching and mocking functionality. """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import re +import sys +import warnings +from contextlib import contextmanager + +import six + +import pytest +from _pytest.fixtures import fixture +from _pytest.pathlib import Path + +RE_IMPORT_ERROR_NAME = re.compile(r"^No module named (.*)$") + + +@fixture +def monkeypatch(): + """The returned ``monkeypatch`` fixture provides these + helper methods to modify objects, dictionaries or os.environ:: + + monkeypatch.setattr(obj, name, value, raising=True) + monkeypatch.delattr(obj, name, raising=True) + monkeypatch.setitem(mapping, name, value) + monkeypatch.delitem(obj, name, raising=True) + monkeypatch.setenv(name, value, prepend=False) + monkeypatch.delenv(name, raising=True) + monkeypatch.syspath_prepend(path) + monkeypatch.chdir(path) + + All modifications will be undone after the requesting + test function or fixture has finished. The ``raising`` + parameter determines if a KeyError or AttributeError + will be raised if the set/deletion operation has no target. 
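+
+    A minimal usage sketch (hypothetical test; assumes ``os`` is imported)::
+
+        def test_getcwd(monkeypatch):
+            monkeypatch.setattr("os.getcwd", lambda: "/")
+            assert os.getcwd() == "/"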
+ """ + mpatch = MonkeyPatch() + yield mpatch + mpatch.undo() + + +def resolve(name): + # simplified from zope.dottedname + parts = name.split(".") + + used = parts.pop(0) + found = __import__(used) + for part in parts: + used += "." + part + try: + found = getattr(found, part) + except AttributeError: + pass + else: + continue + # we use explicit un-nesting of the handling block in order + # to avoid nested exceptions on python 3 + try: + __import__(used) + except ImportError as ex: + # str is used for py2 vs py3 + expected = str(ex).split()[-1] + if expected == used: + raise + else: + raise ImportError("import error in %s: %s" % (used, ex)) + found = annotated_getattr(found, part, used) + return found + + +def annotated_getattr(obj, name, ann): + try: + obj = getattr(obj, name) + except AttributeError: + raise AttributeError( + "%r object at %s has no attribute %r" % (type(obj).__name__, ann, name) + ) + return obj + + +def derive_importpath(import_path, raising): + if not isinstance(import_path, six.string_types) or "." not in import_path: + raise TypeError("must be absolute import path string, not %r" % (import_path,)) + module, attr = import_path.rsplit(".", 1) + target = resolve(module) + if raising: + annotated_getattr(target, attr, ann=module) + return attr, target + + +class Notset(object): + def __repr__(self): + return "" + + +notset = Notset() + + +class MonkeyPatch(object): + """ Object returned by the ``monkeypatch`` fixture keeping a record of setattr/item/env/syspath changes. + """ + + def __init__(self): + self._setattr = [] + self._setitem = [] + self._cwd = None + self._savesyspath = None + + @contextmanager + def context(self): + """ + Context manager that returns a new :class:`MonkeyPatch` object which + undoes any patching done inside the ``with`` block upon exit: + + .. code-block:: python + + import functools + def test_partial(monkeypatch): + with monkeypatch.context() as m: + m.setattr(functools, "partial", 3) + + Useful in situations where it is desired to undo some patches before the test ends, + such as mocking ``stdlib`` functions that might break pytest itself if mocked (for examples + of this see `#3290 `_. + """ + m = MonkeyPatch() + try: + yield m + finally: + m.undo() + + def setattr(self, target, name, value=notset, raising=True): + """ Set attribute value on target, memorizing the old value. + By default raise AttributeError if the attribute did not exist. + + For convenience you can specify a string as ``target`` which + will be interpreted as a dotted import path, with the last part + being the attribute name. Example: + ``monkeypatch.setattr("os.getcwd", lambda: "/")`` + would set the ``getcwd`` function of the ``os`` module. + + The ``raising`` value determines if the setattr should fail + if the attribute is not already present (defaults to True + which means it will raise). 
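+
+        A sketch of both calling conventions (targets illustrative)::
+
+            monkeypatch.setattr(SomeClass, "attr", 42)    # object form
+            monkeypatch.setattr("mypkg.mod.attr", 42)     # dotted-path form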
+ """ + __tracebackhide__ = True + import inspect + + if value is notset: + if not isinstance(target, six.string_types): + raise TypeError( + "use setattr(target, name, value) or " + "setattr(target, value) with target being a dotted " + "import string" + ) + value = name + name, target = derive_importpath(target, raising) + + oldval = getattr(target, name, notset) + if raising and oldval is notset: + raise AttributeError("%r has no attribute %r" % (target, name)) + + # avoid class descriptors like staticmethod/classmethod + if inspect.isclass(target): + oldval = target.__dict__.get(name, notset) + self._setattr.append((target, name, oldval)) + setattr(target, name, value) + + def delattr(self, target, name=notset, raising=True): + """ Delete attribute ``name`` from ``target``, by default raise + AttributeError it the attribute did not previously exist. + + If no ``name`` is specified and ``target`` is a string + it will be interpreted as a dotted import path with the + last part being the attribute name. + + If ``raising`` is set to False, no exception will be raised if the + attribute is missing. + """ + __tracebackhide__ = True + import inspect + + if name is notset: + if not isinstance(target, six.string_types): + raise TypeError( + "use delattr(target, name) or " + "delattr(target) with target being a dotted " + "import string" + ) + name, target = derive_importpath(target, raising) + + if not hasattr(target, name): + if raising: + raise AttributeError(name) + else: + oldval = getattr(target, name, notset) + # Avoid class descriptors like staticmethod/classmethod. + if inspect.isclass(target): + oldval = target.__dict__.get(name, notset) + self._setattr.append((target, name, oldval)) + delattr(target, name) + + def setitem(self, dic, name, value): + """ Set dictionary entry ``name`` to value. """ + self._setitem.append((dic, name, dic.get(name, notset))) + dic[name] = value + + def delitem(self, dic, name, raising=True): + """ Delete ``name`` from dict. Raise KeyError if it doesn't exist. + + If ``raising`` is set to False, no exception will be raised if the + key is missing. + """ + if name not in dic: + if raising: + raise KeyError(name) + else: + self._setitem.append((dic, name, dic.get(name, notset))) + del dic[name] + + def _warn_if_env_name_is_not_str(self, name): + """On Python 2, warn if the given environment variable name is not a native str (#4056)""" + if six.PY2 and not isinstance(name, str): + warnings.warn( + pytest.PytestWarning( + "Environment variable name {!r} should be str".format(name) + ) + ) + + def setenv(self, name, value, prepend=None): + """ Set environment variable ``name`` to ``value``. If ``prepend`` + is a character, read the current environment variable value + and prepend the ``value`` adjoined with the ``prepend`` character.""" + if not isinstance(value, str): + warnings.warn( + pytest.PytestWarning( + "Value of environment variable {name} type should be str, but got " + "{value!r} (type: {type}); converted to str implicitly".format( + name=name, value=value, type=type(value).__name__ + ) + ), + stacklevel=2, + ) + value = str(value) + if prepend and name in os.environ: + value = value + prepend + os.environ[name] + self._warn_if_env_name_is_not_str(name) + self.setitem(os.environ, name, value) + + def delenv(self, name, raising=True): + """ Delete ``name`` from the environment. Raise KeyError if it does + not exist. + + If ``raising`` is set to False, no exception will be raised if the + environment variable is missing. 
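+
+        Sketch (variable name illustrative)::
+
+            monkeypatch.delenv("MY_ENV_VAR", raising=False)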
+ """ + self._warn_if_env_name_is_not_str(name) + self.delitem(os.environ, name, raising=raising) + + def syspath_prepend(self, path): + """ Prepend ``path`` to ``sys.path`` list of import locations. """ + from pkg_resources import fixup_namespace_packages + + if self._savesyspath is None: + self._savesyspath = sys.path[:] + sys.path.insert(0, str(path)) + + # https://github.com/pypa/setuptools/blob/d8b901bc/docs/pkg_resources.txt#L162-L171 + fixup_namespace_packages(str(path)) + + # A call to syspathinsert() usually means that the caller wants to + # import some dynamically created files, thus with python3 we + # invalidate its import caches. + # This is especially important when any namespace package is in used, + # since then the mtime based FileFinder cache (that gets created in + # this case already) gets not invalidated when writing the new files + # quickly afterwards. + if sys.version_info >= (3, 3): + from importlib import invalidate_caches + + invalidate_caches() + + def chdir(self, path): + """ Change the current working directory to the specified path. + Path can be a string or a py.path.local object. + """ + if self._cwd is None: + self._cwd = os.getcwd() + if hasattr(path, "chdir"): + path.chdir() + elif isinstance(path, Path): + # modern python uses the fspath protocol here LEGACY + os.chdir(str(path)) + else: + os.chdir(path) + + def undo(self): + """ Undo previous changes. This call consumes the + undo stack. Calling it a second time has no effect unless + you do more monkeypatching after the undo call. + + There is generally no need to call `undo()`, since it is + called automatically during tear-down. + + Note that the same `monkeypatch` fixture is used across a + single test function invocation. If `monkeypatch` is used both by + the test function itself and one of the test fixtures, + calling `undo()` will undo all of the changes made in + both functions. + """ + for obj, name, value in reversed(self._setattr): + if value is not notset: + setattr(obj, name, value) + else: + delattr(obj, name) + self._setattr[:] = [] + for dictionary, name, value in reversed(self._setitem): + if value is notset: + try: + del dictionary[name] + except KeyError: + pass # was already deleted, so we have the desired state + else: + dictionary[name] = value + self._setitem[:] = [] + if self._savesyspath is not None: + sys.path[:] = self._savesyspath + self._savesyspath = None + + if self._cwd is not None: + os.chdir(self._cwd) + self._cwd = None diff --git a/venv/lib/python2.7/site-packages/_pytest/nodes.py b/venv/lib/python2.7/site-packages/_pytest/nodes.py new file mode 100644 index 0000000..206e9ae --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/nodes.py @@ -0,0 +1,429 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import warnings + +import py +import six + +import _pytest._code +from _pytest.compat import getfslineno +from _pytest.mark.structures import NodeKeywords +from _pytest.outcomes import fail + +SEP = "/" + +tracebackcutdir = py.path.local(_pytest.__file__).dirpath() + + +def _splitnode(nodeid): + """Split a nodeid into constituent 'parts'. + + Node IDs are strings, and can be things like: + '' + 'testing/code' + 'testing/code/test_excinfo.py' + 'testing/code/test_excinfo.py::TestFormattedExcinfo' + + Return values are lists e.g. 
+ [] + ['testing', 'code'] + ['testing', 'code', 'test_excinfo.py'] + ['testing', 'code', 'test_excinfo.py', 'TestFormattedExcinfo', '()'] + """ + if nodeid == "": + # If there is no root node at all, return an empty list so the caller's logic can remain sane + return [] + parts = nodeid.split(SEP) + # Replace single last element 'test_foo.py::Bar' with multiple elements 'test_foo.py', 'Bar' + parts[-1:] = parts[-1].split("::") + return parts + + +def ischildnode(baseid, nodeid): + """Return True if the nodeid is a child node of the baseid. + + E.g. 'foo/bar::Baz' is a child of 'foo', 'foo/bar' and 'foo/bar::Baz', but not of 'foo/blorp' + """ + base_parts = _splitnode(baseid) + node_parts = _splitnode(nodeid) + if len(node_parts) < len(base_parts): + return False + return node_parts[: len(base_parts)] == base_parts + + +class Node(object): + """ base class for Collector and Item the test collection tree. + Collector subclasses have children, Items are terminal nodes.""" + + def __init__( + self, name, parent=None, config=None, session=None, fspath=None, nodeid=None + ): + #: a unique name within the scope of the parent node + self.name = name + + #: the parent collector node. + self.parent = parent + + #: the pytest config object + self.config = config or parent.config + + #: the session this node is part of + self.session = session or parent.session + + #: filesystem path where this node was collected from (can be None) + self.fspath = fspath or getattr(parent, "fspath", None) + + #: keywords/markers collected from all scopes + self.keywords = NodeKeywords(self) + + #: the marker objects belonging to this node + self.own_markers = [] + + #: allow adding of extra keywords to use for matching + self.extra_keyword_matches = set() + + # used for storing artificial fixturedefs for direct parametrization + self._name2pseudofixturedef = {} + + if nodeid is not None: + assert "::()" not in nodeid + self._nodeid = nodeid + else: + self._nodeid = self.parent.nodeid + if self.name != "()": + self._nodeid += "::" + self.name + + @property + def ihook(self): + """ fspath sensitive hook proxy used to call pytest hooks""" + return self.session.gethookproxy(self.fspath) + + def __repr__(self): + return "<%s %s>" % (self.__class__.__name__, getattr(self, "name", None)) + + def warn(self, warning): + """Issue a warning for this item. + + Warnings will be displayed after the test session, unless explicitly suppressed + + :param Warning warning: the warning instance to issue. Must be a subclass of PytestWarning. + + :raise ValueError: if ``warning`` instance is not a subclass of PytestWarning. + + Example usage: + + .. code-block:: python + + node.warn(PytestWarning("some message")) + + """ + from _pytest.warning_types import PytestWarning + + if not isinstance(warning, PytestWarning): + raise ValueError( + "warning must be an instance of PytestWarning or subclass, got {!r}".format( + warning + ) + ) + path, lineno = get_fslocation_from_item(self) + warnings.warn_explicit( + warning, + category=None, + filename=str(path), + lineno=lineno + 1 if lineno is not None else None, + ) + + # methods for ordering nodes + @property + def nodeid(self): + """ a ::-separated string denoting its collection tree address. """ + return self._nodeid + + def __hash__(self): + return hash(self.nodeid) + + def setup(self): + pass + + def teardown(self): + pass + + def listchain(self): + """ return list of all parent collectors up to self, + starting from root of collection tree. 
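+
+        A sketch of a typical chain (node reprs illustrative)::
+
+            [<Session ...>, <Module test_x.py>, <Function test_a>]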
""" + chain = [] + item = self + while item is not None: + chain.append(item) + item = item.parent + chain.reverse() + return chain + + def add_marker(self, marker, append=True): + """dynamically add a marker object to the node. + + :type marker: ``str`` or ``pytest.mark.*`` object + :param marker: + ``append=True`` whether to append the marker, + if ``False`` insert at position ``0``. + """ + from _pytest.mark import MarkDecorator, MARK_GEN + + if isinstance(marker, six.string_types): + marker = getattr(MARK_GEN, marker) + elif not isinstance(marker, MarkDecorator): + raise ValueError("is not a string or pytest.mark.* Marker") + self.keywords[marker.name] = marker + if append: + self.own_markers.append(marker.mark) + else: + self.own_markers.insert(0, marker.mark) + + def iter_markers(self, name=None): + """ + :param name: if given, filter the results by the name attribute + + iterate over all markers of the node + """ + return (x[1] for x in self.iter_markers_with_node(name=name)) + + def iter_markers_with_node(self, name=None): + """ + :param name: if given, filter the results by the name attribute + + iterate over all markers of the node + returns sequence of tuples (node, mark) + """ + for node in reversed(self.listchain()): + for mark in node.own_markers: + if name is None or getattr(mark, "name", None) == name: + yield node, mark + + def get_closest_marker(self, name, default=None): + """return the first marker matching the name, from closest (for example function) to farther level (for example + module level). + + :param default: fallback return value of no marker was found + :param name: name to filter by + """ + return next(self.iter_markers(name=name), default) + + def listextrakeywords(self): + """ Return a set of all extra keywords in self and any parents.""" + extra_keywords = set() + for item in self.listchain(): + extra_keywords.update(item.extra_keyword_matches) + return extra_keywords + + def listnames(self): + return [x.name for x in self.listchain()] + + def addfinalizer(self, fin): + """ register a function to be called when this node is finalized. + + This method can only be called when this node is active + in a setup chain, for example during self.setup(). + """ + self.session._setupstate.addfinalizer(fin, self) + + def getparent(self, cls): + """ get the next parent node (including ourself) + which is an instance of the given class""" + current = self + while current and not isinstance(current, cls): + current = current.parent + return current + + def _prunetraceback(self, excinfo): + pass + + def _repr_failure_py(self, excinfo, style=None): + if excinfo.errisinstance(fail.Exception): + if not excinfo.value.pytrace: + return six.text_type(excinfo.value) + fm = self.session._fixturemanager + if excinfo.errisinstance(fm.FixtureLookupError): + return excinfo.value.formatrepr() + tbfilter = True + if self.config.getoption("fulltrace", False): + style = "long" + else: + tb = _pytest._code.Traceback([excinfo.traceback[-1]]) + self._prunetraceback(excinfo) + if len(excinfo.traceback) == 0: + excinfo.traceback = tb + tbfilter = False # prunetraceback already does it + if style == "auto": + style = "long" + # XXX should excinfo.getrepr record all data and toterminal() process it? 
+ if style is None: + if self.config.getoption("tbstyle", "auto") == "short": + style = "short" + else: + style = "long" + + if self.config.getoption("verbose", 0) > 1: + truncate_locals = False + else: + truncate_locals = True + + try: + os.getcwd() + abspath = False + except OSError: + abspath = True + + return excinfo.getrepr( + funcargs=True, + abspath=abspath, + showlocals=self.config.getoption("showlocals", False), + style=style, + tbfilter=tbfilter, + truncate_locals=truncate_locals, + ) + + repr_failure = _repr_failure_py + + +def get_fslocation_from_item(item): + """Tries to extract the actual location from an item, depending on available attributes: + + * "fslocation": a pair (path, lineno) + * "obj": a Python object that the item wraps. + * "fspath": just a path + + :rtype: a tuple of (str|LocalPath, int) with filename and line number. + """ + result = getattr(item, "location", None) + if result is not None: + return result[:2] + obj = getattr(item, "obj", None) + if obj is not None: + return getfslineno(obj) + return getattr(item, "fspath", "unknown location"), -1 + + +class Collector(Node): + """ Collector instances create children through collect() + and thus iteratively build a tree. + """ + + class CollectError(Exception): + """ an error during collection, contains a custom message. """ + + def collect(self): + """ returns a list of children (items and collectors) + for this collection node. + """ + raise NotImplementedError("abstract") + + def repr_failure(self, excinfo): + """ represent a collection failure. """ + if excinfo.errisinstance(self.CollectError): + exc = excinfo.value + return str(exc.args[0]) + + # Respect explicit tbstyle option, but default to "short" + # (None._repr_failure_py defaults to "long" without "fulltrace" option). + tbstyle = self.config.getoption("tbstyle", "auto") + if tbstyle == "auto": + tbstyle = "short" + + return self._repr_failure_py(excinfo, style=tbstyle) + + def _prunetraceback(self, excinfo): + if hasattr(self, "fspath"): + traceback = excinfo.traceback + ntraceback = traceback.cut(path=self.fspath) + if ntraceback == traceback: + ntraceback = ntraceback.cut(excludepath=tracebackcutdir) + excinfo.traceback = ntraceback.filter() + + +def _check_initialpaths_for_relpath(session, fspath): + for initial_path in session._initialpaths: + if fspath.common(initial_path) == initial_path: + return fspath.relto(initial_path) + + +class FSCollector(Collector): + def __init__(self, fspath, parent=None, config=None, session=None, nodeid=None): + fspath = py.path.local(fspath) # xxx only for test_resultlog.py? + name = fspath.basename + if parent is not None: + rel = fspath.relto(parent.fspath) + if rel: + name = rel + name = name.replace(os.sep, SEP) + self.fspath = fspath + + session = session or parent.session + + if nodeid is None: + nodeid = self.fspath.relto(session.config.rootdir) + + if not nodeid: + nodeid = _check_initialpaths_for_relpath(session, fspath) + if nodeid and os.sep != SEP: + nodeid = nodeid.replace(os.sep, SEP) + + super(FSCollector, self).__init__( + name, parent, config, session, nodeid=nodeid, fspath=fspath + ) + + +class File(FSCollector): + """ base class for collecting tests from a file. """ + + +class Item(Node): + """ a basic test invocation item. Note that for a single function + there might be multiple test invocation items. 
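+
+    A sketch: one parametrized function collects into several items
+    (ids illustrative)::
+
+        @pytest.mark.parametrize("x", [0, 1])
+        def test_func(x):
+            pass
+
+        # collected as test_func[0] and test_func[1]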
+ """ + + nextitem = None + + def __init__(self, name, parent=None, config=None, session=None, nodeid=None): + super(Item, self).__init__(name, parent, config, session, nodeid=nodeid) + self._report_sections = [] + + #: user properties is a list of tuples (name, value) that holds user + #: defined properties for this test. + self.user_properties = [] + + def add_report_section(self, when, key, content): + """ + Adds a new report section, similar to what's done internally to add stdout and + stderr captured output:: + + item.add_report_section("call", "stdout", "report section contents") + + :param str when: + One of the possible capture states, ``"setup"``, ``"call"``, ``"teardown"``. + :param str key: + Name of the section, can be customized at will. Pytest uses ``"stdout"`` and + ``"stderr"`` internally. + + :param str content: + The full contents as a string. + """ + if content: + self._report_sections.append((when, key, content)) + + def reportinfo(self): + return self.fspath, None, "" + + @property + def location(self): + try: + return self._location + except AttributeError: + location = self.reportinfo() + fspath = self.session._node_location_to_relpath(location[0]) + location = (fspath, location[1], str(location[2])) + self._location = location + return location diff --git a/venv/lib/python2.7/site-packages/_pytest/nose.py b/venv/lib/python2.7/site-packages/_pytest/nose.py new file mode 100644 index 0000000..fbab91d --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/nose.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +""" run test suites written for nose. """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import sys + +import six + +import pytest +from _pytest import python +from _pytest import runner +from _pytest import unittest +from _pytest.config import hookimpl + + +def get_skip_exceptions(): + skip_classes = set() + for module_name in ("unittest", "unittest2", "nose"): + mod = sys.modules.get(module_name) + if hasattr(mod, "SkipTest"): + skip_classes.add(mod.SkipTest) + return tuple(skip_classes) + + +def pytest_runtest_makereport(item, call): + if call.excinfo and call.excinfo.errisinstance(get_skip_exceptions()): + # let's substitute the excinfo with a pytest.skip one + call2 = runner.CallInfo.from_call( + lambda: pytest.skip(six.text_type(call.excinfo.value)), call.when + ) + call.excinfo = call2.excinfo + + +@hookimpl(trylast=True) +def pytest_runtest_setup(item): + if is_potential_nosetest(item): + if not call_optional(item.obj, "setup"): + # call module level setup if there is no object level one + call_optional(item.parent.obj, "setup") + # XXX this implies we only call teardown when setup worked + item.session._setupstate.addfinalizer((lambda: teardown_nose(item)), item) + + +def teardown_nose(item): + if is_potential_nosetest(item): + if not call_optional(item.obj, "teardown"): + call_optional(item.parent.obj, "teardown") + # if hasattr(item.parent, '_nosegensetup'): + # #call_optional(item._nosegensetup, 'teardown') + # del item.parent._nosegensetup + + +def is_potential_nosetest(item): + # extra check needed since we do not do nose style setup/teardown + # on direct unittest style classes + return isinstance(item, python.Function) and not isinstance( + item, unittest.TestCaseFunction + ) + + +def call_optional(obj, name): + method = getattr(obj, name, None) + isfixture = hasattr(method, "_pytestfixturefunction") + if method is not None and not isfixture and callable(method): + # If there's 
any problems allow the exception to raise rather than + # silently ignoring them + method() + return True diff --git a/venv/lib/python2.7/site-packages/_pytest/outcomes.py b/venv/lib/python2.7/site-packages/_pytest/outcomes.py new file mode 100644 index 0000000..4620f95 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/outcomes.py @@ -0,0 +1,187 @@ +# -*- coding: utf-8 -*- +""" +exception classes and constants handling test outcomes +as well as functions creating them +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import sys + +from packaging.version import Version + + +class OutcomeException(BaseException): + """ OutcomeException and its subclass instances indicate and + contain info about test and collection outcomes. + """ + + def __init__(self, msg=None, pytrace=True): + BaseException.__init__(self, msg) + self.msg = msg + self.pytrace = pytrace + + def __repr__(self): + if self.msg: + val = self.msg + if isinstance(val, bytes): + val = val.decode("UTF-8", errors="replace") + return val + return "<%s instance>" % (self.__class__.__name__,) + + __str__ = __repr__ + + +TEST_OUTCOME = (OutcomeException, Exception) + + +class Skipped(OutcomeException): + # XXX hackish: on 3k we fake to live in the builtins + # in order to have Skipped exception printing shorter/nicer + __module__ = "builtins" + + def __init__(self, msg=None, pytrace=True, allow_module_level=False): + OutcomeException.__init__(self, msg=msg, pytrace=pytrace) + self.allow_module_level = allow_module_level + + +class Failed(OutcomeException): + """ raised from an explicit call to pytest.fail() """ + + __module__ = "builtins" + + +class Exit(Exception): + """ raised for immediate program exits (no tracebacks/summaries)""" + + def __init__(self, msg="unknown reason", returncode=None): + self.msg = msg + self.returncode = returncode + super(Exit, self).__init__(msg) + + +# exposed helper methods + + +def exit(msg, returncode=None): + """ + Exit testing process. + + :param str msg: message to display upon exit. + :param int returncode: return code to be used when exiting pytest. + """ + __tracebackhide__ = True + raise Exit(msg, returncode) + + +exit.Exception = Exit + + +def skip(msg="", **kwargs): + """ + Skip an executing test with the given message. + + This function should be called only during testing (setup, call or teardown) or + during collection by using the ``allow_module_level`` flag. This function can + be called in doctests as well. + + :kwarg bool allow_module_level: allows this function to be called at + module level, skipping the rest of the module. Default to False. + + .. note:: + It is better to use the :ref:`pytest.mark.skipif ref` marker when possible to declare a test to be + skipped under certain conditions like mismatching platforms or + dependencies. + Similarly, use the ``# doctest: +SKIP`` directive (see `doctest.SKIP + `_) + to skip a doctest statically. + """ + __tracebackhide__ = True + allow_module_level = kwargs.pop("allow_module_level", False) + if kwargs: + raise TypeError("unexpected keyword arguments: {}".format(sorted(kwargs))) + raise Skipped(msg=msg, allow_module_level=allow_module_level) + + +skip.Exception = Skipped + + +def fail(msg="", pytrace=True): + """ + Explicitly fail an executing test with the given message. + + :param str msg: the message to show the user as reason for the failure. 
+ :param bool pytrace: if false the msg represents the full failure information and no + python traceback will be reported. + """ + __tracebackhide__ = True + raise Failed(msg=msg, pytrace=pytrace) + + +fail.Exception = Failed + + +class XFailed(fail.Exception): + """ raised from an explicit call to pytest.xfail() """ + + +def xfail(reason=""): + """ + Imperatively xfail an executing test or setup functions with the given reason. + + This function should be called only during testing (setup, call or teardown). + + .. note:: + It is better to use the :ref:`pytest.mark.xfail ref` marker when possible to declare a test to be + xfailed under certain conditions like known bugs or missing features. + """ + __tracebackhide__ = True + raise XFailed(reason) + + +xfail.Exception = XFailed + + +def importorskip(modname, minversion=None, reason=None): + """Imports and returns the requested module ``modname``, or skip the current test + if the module cannot be imported. + + :param str modname: the name of the module to import + :param str minversion: if given, the imported module ``__version__`` attribute must be + at least this minimal version, otherwise the test is still skipped. + :param str reason: if given, this reason is shown as the message when the module + cannot be imported. + """ + import warnings + + __tracebackhide__ = True + compile(modname, "", "eval") # to catch syntaxerrors + import_exc = None + + with warnings.catch_warnings(): + # make sure to ignore ImportWarnings that might happen because + # of existing directories with the same name we're trying to + # import but without a __init__.py file + warnings.simplefilter("ignore") + try: + __import__(modname) + except ImportError as exc: + # Do not raise chained exception here(#1485) + import_exc = exc + if import_exc: + if reason is None: + reason = "could not import %r: %s" % (modname, import_exc) + raise Skipped(reason, allow_module_level=True) + mod = sys.modules[modname] + if minversion is None: + return mod + verattr = getattr(mod, "__version__", None) + if minversion is not None: + if verattr is None or Version(verattr) < Version(minversion): + raise Skipped( + "module %r has __version__ %r, required is: %r" + % (modname, verattr, minversion), + allow_module_level=True, + ) + return mod diff --git a/venv/lib/python2.7/site-packages/_pytest/pastebin.py b/venv/lib/python2.7/site-packages/_pytest/pastebin.py new file mode 100644 index 0000000..7a3e802 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/pastebin.py @@ -0,0 +1,110 @@ +# -*- coding: utf-8 -*- +""" submit failure or test session information to a pastebin service. 
""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import sys +import tempfile + +import six + +import pytest + + +def pytest_addoption(parser): + group = parser.getgroup("terminal reporting") + group._addoption( + "--pastebin", + metavar="mode", + action="store", + dest="pastebin", + default=None, + choices=["failed", "all"], + help="send failed|all info to bpaste.net pastebin service.", + ) + + +@pytest.hookimpl(trylast=True) +def pytest_configure(config): + if config.option.pastebin == "all": + tr = config.pluginmanager.getplugin("terminalreporter") + # if no terminal reporter plugin is present, nothing we can do here; + # this can happen when this function executes in a slave node + # when using pytest-xdist, for example + if tr is not None: + # pastebin file will be utf-8 encoded binary file + config._pastebinfile = tempfile.TemporaryFile("w+b") + oldwrite = tr._tw.write + + def tee_write(s, **kwargs): + oldwrite(s, **kwargs) + if isinstance(s, six.text_type): + s = s.encode("utf-8") + config._pastebinfile.write(s) + + tr._tw.write = tee_write + + +def pytest_unconfigure(config): + if hasattr(config, "_pastebinfile"): + # get terminal contents and delete file + config._pastebinfile.seek(0) + sessionlog = config._pastebinfile.read() + config._pastebinfile.close() + del config._pastebinfile + # undo our patching in the terminal reporter + tr = config.pluginmanager.getplugin("terminalreporter") + del tr._tw.__dict__["write"] + # write summary + tr.write_sep("=", "Sending information to Paste Service") + pastebinurl = create_new_paste(sessionlog) + tr.write_line("pastebin session-log: %s\n" % pastebinurl) + + +def create_new_paste(contents): + """ + Creates a new paste using bpaste.net service. 
+ + :contents: paste contents as utf-8 encoded bytes + :returns: url to the pasted contents + """ + import re + + if sys.version_info < (3, 0): + from urllib import urlopen, urlencode + else: + from urllib.request import urlopen + from urllib.parse import urlencode + + params = {"code": contents, "lexer": "text", "expiry": "1week"} + url = "https://bpaste.net" + response = urlopen(url, data=urlencode(params).encode("ascii")).read() + m = re.search(r'href="/raw/(\w+)"', response.decode("utf-8")) + if m: + return "%s/show/%s" % (url, m.group(1)) + else: + return "bad response: " + response + + +def pytest_terminal_summary(terminalreporter): + import _pytest.config + + if terminalreporter.config.option.pastebin != "failed": + return + tr = terminalreporter + if "failed" in tr.stats: + terminalreporter.write_sep("=", "Sending information to Paste Service") + for rep in terminalreporter.stats.get("failed"): + try: + msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc + except AttributeError: + msg = tr._getfailureheadline(rep) + tw = _pytest.config.create_terminal_writer( + terminalreporter.config, stringio=True + ) + rep.toterminal(tw) + s = tw.stringio.getvalue() + assert len(s) + pastebinurl = create_new_paste(s) + tr.write_line("%s --> %s" % (msg, pastebinurl)) diff --git a/venv/lib/python2.7/site-packages/_pytest/pathlib.py b/venv/lib/python2.7/site-packages/_pytest/pathlib.py new file mode 100644 index 0000000..42071f4 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/pathlib.py @@ -0,0 +1,380 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +import atexit +import errno +import fnmatch +import itertools +import operator +import os +import shutil +import sys +import uuid +import warnings +from functools import partial +from functools import reduce +from os.path import expanduser +from os.path import expandvars +from os.path import isabs +from os.path import sep +from posixpath import sep as posix_sep + +import six +from six.moves import map + +from .compat import PY36 +from _pytest.warning_types import PytestWarning + +if PY36: + from pathlib import Path, PurePath +else: + from pathlib2 import Path, PurePath + +__all__ = ["Path", "PurePath"] + + +LOCK_TIMEOUT = 60 * 60 * 3 + +get_lock_path = operator.methodcaller("joinpath", ".lock") + + +def ensure_reset_dir(path): + """ + ensures the given path is an empty directory + """ + if path.exists(): + rm_rf(path) + path.mkdir() + + +def on_rm_rf_error(func, path, exc, **kwargs): + """Handles known read-only errors during rmtree. + + The returned value is used only by our own tests. + """ + start_path = kwargs["start_path"] + exctype, excvalue = exc[:2] + + # another process removed the file in the middle of the "rm_rf" (xdist for example) + # more context: https://github.com/pytest-dev/pytest/issues/5974#issuecomment-543799018 + if isinstance(excvalue, OSError) and excvalue.errno == errno.ENOENT: + return False + + if not isinstance(excvalue, OSError) or excvalue.errno not in ( + errno.EACCES, + errno.EPERM, + ): + warnings.warn( + PytestWarning( + "(rm_rf) error removing {}\n{}: {}".format(path, exctype, excvalue) + ) + ) + return False + + if func not in (os.rmdir, os.remove, os.unlink): + warnings.warn( + PytestWarning( + "(rm_rf) unknown function {} when removing {}:\n{}: {}".format( + path, func, exctype, excvalue + ) + ) + ) + return False + + # Chmod + retry. 
+ import stat + + def chmod_rw(p): + mode = os.stat(p).st_mode + os.chmod(p, mode | stat.S_IRUSR | stat.S_IWUSR) + + # For files, we need to recursively go upwards in the directories to + # ensure they all are also writable. + p = Path(path) + if p.is_file(): + for parent in p.parents: + chmod_rw(str(parent)) + # stop when we reach the original path passed to rm_rf + if parent == start_path: + break + chmod_rw(str(path)) + + func(path) + return True + + +def rm_rf(path): + """Remove the path contents recursively, even if some elements + are read-only. + """ + onerror = partial(on_rm_rf_error, start_path=path) + shutil.rmtree(str(path), onerror=onerror) + + +def find_prefixed(root, prefix): + """finds all elements in root that begin with the prefix, case insensitive""" + l_prefix = prefix.lower() + for x in root.iterdir(): + if x.name.lower().startswith(l_prefix): + yield x + + +def extract_suffixes(iter, prefix): + """ + :param iter: iterator over path names + :param prefix: expected prefix of the path names + :returns: the parts of the paths following the prefix + """ + p_len = len(prefix) + for p in iter: + yield p.name[p_len:] + + +def find_suffixes(root, prefix): + """combines find_prefixes and extract_suffixes + """ + return extract_suffixes(find_prefixed(root, prefix), prefix) + + +def parse_num(maybe_num): + """parses number path suffixes, returns -1 on error""" + try: + return int(maybe_num) + except ValueError: + return -1 + + +if six.PY2: + + def _max(iterable, default): + """needed due to python2.7 lacking the default argument for max""" + return reduce(max, iterable, default) + + +else: + _max = max + + +def _force_symlink(root, target, link_to): + """helper to create the current symlink + + it's full of race conditions that are reasonably ok to ignore + for the context of best effort linking to the latest testrun + + the presumption being thatin case of much parallelism + the inaccuracy is going to be acceptable + """ + current_symlink = root.joinpath(target) + try: + current_symlink.unlink() + except OSError: + pass + try: + current_symlink.symlink_to(link_to) + except Exception: + pass + + +def make_numbered_dir(root, prefix): + """create a directory with an increased number as suffix for the given prefix""" + for i in range(10): + # try up to 10 times to create the folder + max_existing = _max(map(parse_num, find_suffixes(root, prefix)), default=-1) + new_number = max_existing + 1 + new_path = root.joinpath("{}{}".format(prefix, new_number)) + try: + new_path.mkdir() + except Exception: + pass + else: + _force_symlink(root, prefix + "current", new_path) + return new_path + else: + raise EnvironmentError( + "could not create numbered dir with prefix " + "{prefix} in {root} after 10 tries".format(prefix=prefix, root=root) + ) + + +def create_cleanup_lock(p): + """crates a lock to prevent premature folder cleanup""" + lock_path = get_lock_path(p) + try: + fd = os.open(str(lock_path), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644) + except OSError as e: + if e.errno == errno.EEXIST: + six.raise_from( + EnvironmentError("cannot create lockfile in {path}".format(path=p)), e + ) + else: + raise + else: + pid = os.getpid() + spid = str(pid) + if not isinstance(spid, bytes): + spid = spid.encode("ascii") + os.write(fd, spid) + os.close(fd) + if not lock_path.is_file(): + raise EnvironmentError("lock path got renamed after successful creation") + return lock_path + + +def register_cleanup_lock_removal(lock_path, register=atexit.register): + """registers a cleanup function for removing 
a lock, by default on atexit""" + pid = os.getpid() + + def cleanup_on_exit(lock_path=lock_path, original_pid=pid): + current_pid = os.getpid() + if current_pid != original_pid: + # fork + return + try: + lock_path.unlink() + except (OSError, IOError): + pass + + return register(cleanup_on_exit) + + +def maybe_delete_a_numbered_dir(path): + """removes a numbered directory if its lock can be obtained and it does not seem to be in use""" + lock_path = None + try: + lock_path = create_cleanup_lock(path) + parent = path.parent + + garbage = parent.joinpath("garbage-{}".format(uuid.uuid4())) + path.rename(garbage) + rm_rf(garbage) + except (OSError, EnvironmentError): + # known races: + # * other process did a cleanup at the same time + # * deletable folder was found + # * process cwd (Windows) + return + finally: + # if we created the lock, ensure we remove it even if we failed + # to properly remove the numbered dir + if lock_path is not None: + try: + lock_path.unlink() + except (OSError, IOError): + pass + + +def ensure_deletable(path, consider_lock_dead_if_created_before): + """checks if a lock exists and breaks it if its considered dead""" + if path.is_symlink(): + return False + lock = get_lock_path(path) + if not lock.exists(): + return True + try: + lock_time = lock.stat().st_mtime + except Exception: + return False + else: + if lock_time < consider_lock_dead_if_created_before: + lock.unlink() + return True + else: + return False + + +def try_cleanup(path, consider_lock_dead_if_created_before): + """tries to cleanup a folder if we can ensure it's deletable""" + if ensure_deletable(path, consider_lock_dead_if_created_before): + maybe_delete_a_numbered_dir(path) + + +def cleanup_candidates(root, prefix, keep): + """lists candidates for numbered directories to be removed - follows py.path""" + max_existing = _max(map(parse_num, find_suffixes(root, prefix)), default=-1) + max_delete = max_existing - keep + paths = find_prefixed(root, prefix) + paths, paths2 = itertools.tee(paths) + numbers = map(parse_num, extract_suffixes(paths2, prefix)) + for path, number in zip(paths, numbers): + if number <= max_delete: + yield path + + +def cleanup_numbered_dir(root, prefix, keep, consider_lock_dead_if_created_before): + """cleanup for lock driven numbered directories""" + for path in cleanup_candidates(root, prefix, keep): + try_cleanup(path, consider_lock_dead_if_created_before) + for path in root.glob("garbage-*"): + try_cleanup(path, consider_lock_dead_if_created_before) + + +def make_numbered_dir_with_cleanup(root, prefix, keep, lock_timeout): + """creates a numbered dir with a cleanup lock and removes old ones""" + e = None + for i in range(10): + try: + p = make_numbered_dir(root, prefix) + lock_path = create_cleanup_lock(p) + register_cleanup_lock_removal(lock_path) + except Exception as exc: + e = exc + else: + consider_lock_dead_if_created_before = p.stat().st_mtime - lock_timeout + cleanup_numbered_dir( + root=root, + prefix=prefix, + keep=keep, + consider_lock_dead_if_created_before=consider_lock_dead_if_created_before, + ) + return p + assert e is not None + raise e + + +def resolve_from_str(input, root): + assert not isinstance(input, Path), "would break on py2" + root = Path(root) + input = expanduser(input) + input = expandvars(input) + if isabs(input): + return Path(input) + else: + return root.joinpath(input) + + +def fnmatch_ex(pattern, path): + """FNMatcher port from py.path.common which works with PurePath() instances. 
+ + The difference between this algorithm and PurePath.match() is that the latter matches "**" glob expressions + for each part of the path, while this algorithm uses the whole path instead. + + For example: + "tests/foo/bar/doc/test_foo.py" matches pattern "tests/**/doc/test*.py" with this algorithm, but not with + PurePath.match(). + + This algorithm was ported to keep backward-compatibility with existing settings which assume paths match according + this logic. + + References: + * https://bugs.python.org/issue29249 + * https://bugs.python.org/issue34731 + """ + path = PurePath(path) + iswin32 = sys.platform.startswith("win") + + if iswin32 and sep not in pattern and posix_sep in pattern: + # Running on Windows, the pattern has no Windows path separators, + # and the pattern has one or more Posix path separators. Replace + # the Posix path separators with the Windows path separator. + pattern = pattern.replace(posix_sep, sep) + + if sep not in pattern: + name = path.name + else: + name = six.text_type(path) + return fnmatch.fnmatch(name, pattern) + + +def parts(s): + parts = s.split(sep) + return {sep.join(parts[: i + 1]) or sep for i in range(len(parts))} diff --git a/venv/lib/python2.7/site-packages/_pytest/pytester.py b/venv/lib/python2.7/site-packages/_pytest/pytester.py new file mode 100644 index 0000000..f1d739c --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/pytester.py @@ -0,0 +1,1413 @@ +# -*- coding: utf-8 -*- +"""(disabled by default) support for testing pytest and pytest plugins.""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import codecs +import gc +import os +import platform +import re +import subprocess +import sys +import time +import traceback +from fnmatch import fnmatch +from weakref import WeakKeyDictionary + +import py +import six + +import pytest +from _pytest._code import Source +from _pytest._io.saferepr import saferepr +from _pytest.assertion.rewrite import AssertionRewritingHook +from _pytest.capture import MultiCapture +from _pytest.capture import SysCapture +from _pytest.compat import safe_str +from _pytest.compat import Sequence +from _pytest.main import EXIT_INTERRUPTED +from _pytest.main import EXIT_OK +from _pytest.main import Session +from _pytest.monkeypatch import MonkeyPatch +from _pytest.pathlib import Path + +IGNORE_PAM = [ # filenames added when obtaining details about the current user + u"/var/lib/sss/mc/passwd" +] + + +def pytest_addoption(parser): + parser.addoption( + "--lsof", + action="store_true", + dest="lsof", + default=False, + help="run FD checks if lsof is available", + ) + + parser.addoption( + "--runpytest", + default="inprocess", + dest="runpytest", + choices=("inprocess", "subprocess"), + help=( + "run pytest sub runs in tests using an 'inprocess' " + "or 'subprocess' (python -m main) method" + ), + ) + + parser.addini( + "pytester_example_dir", help="directory to take the pytester example files from" + ) + + +def pytest_configure(config): + if config.getvalue("lsof"): + checker = LsofFdLeakChecker() + if checker.matching_platform(): + config.pluginmanager.register(checker) + + config.addinivalue_line( + "markers", + "pytester_example_path(*path_segments): join the given path " + "segments to `pytester_example_dir` for this test.", + ) + + +def raise_on_kwargs(kwargs): + __tracebackhide__ = True + if kwargs: # pragma: no branch + raise TypeError( + "Unexpected keyword arguments: {}".format(", ".join(sorted(kwargs))) + ) + + +class 
LsofFdLeakChecker(object):
+    def get_open_files(self):
+        out = self._exec_lsof()
+        open_files = self._parse_lsof_output(out)
+        return open_files
+
+    def _exec_lsof(self):
+        pid = os.getpid()
+        # py3: use subprocess.DEVNULL directly.
+        with open(os.devnull, "wb") as devnull:
+            return subprocess.check_output(
+                ("lsof", "-Ffn0", "-p", str(pid)), stderr=devnull
+            ).decode()
+
+    def _parse_lsof_output(self, out):
+        def isopen(line):
+            return line.startswith("f") and (
+                "deleted" not in line
+                and "mem" not in line
+                and "txt" not in line
+                and "cwd" not in line
+            )
+
+        open_files = []
+
+        for line in out.split("\n"):
+            if isopen(line):
+                fields = line.split("\0")
+                fd = fields[0][1:]
+                filename = fields[1][1:]
+                if filename in IGNORE_PAM:
+                    continue
+                if filename.startswith("/"):
+                    open_files.append((fd, filename))
+
+        return open_files
+
+    def matching_platform(self):
+        try:
+            subprocess.check_output(("lsof", "-v"))
+        except (OSError, subprocess.CalledProcessError):
+            return False
+        else:
+            return True
+
+    @pytest.hookimpl(hookwrapper=True, tryfirst=True)
+    def pytest_runtest_protocol(self, item):
+        lines1 = self.get_open_files()
+        yield
+        if hasattr(sys, "pypy_version_info"):
+            gc.collect()
+        lines2 = self.get_open_files()
+
+        new_fds = {t[0] for t in lines2} - {t[0] for t in lines1}
+        leaked_files = [t for t in lines2 if t[0] in new_fds]
+        if leaked_files:
+            error = []
+            error.append("***** %s FD leakage detected" % len(leaked_files))
+            error.extend([str(f) for f in leaked_files])
+            error.append("*** Before:")
+            error.extend([str(f) for f in lines1])
+            error.append("*** After:")
+            error.extend([str(f) for f in lines2])
+            error.append(error[0])
+            error.append("*** function %s:%s: %s " % item.location)
+            error.append("See issue #2366")
+            item.warn(pytest.PytestWarning("\n".join(error)))
+
+
+# used at least by pytest-xdist plugin
+
+
+@pytest.fixture
+def _pytest(request):
+    """Return a helper which offers a gethookrecorder(hook) method which
+    returns a HookRecorder instance which helps to make assertions about called
+    hooks.
+
+    """
+    return PytestArg(request)
+
+
+class PytestArg(object):
+    def __init__(self, request):
+        self.request = request
+
+    def gethookrecorder(self, hook):
+        hookrecorder = HookRecorder(hook._pm)
+        self.request.addfinalizer(hookrecorder.finish_recording)
+        return hookrecorder
+
+
+def get_public_names(values):
+    """Only return names from iterator values without a leading underscore."""
+    return [x for x in values if x[0] != "_"]
+
+
+class ParsedCall(object):
+    def __init__(self, name, kwargs):
+        self.__dict__.update(kwargs)
+        self._name = name
+
+    def __repr__(self):
+        d = self.__dict__.copy()
+        del d["_name"]
+        return "<ParsedCall %r(**%r)>" % (self._name, d)
+
+
+class HookRecorder(object):
+    """Record all hooks called in a plugin manager.
+
+    This wraps all the hook calls in the plugin manager, recording each call
+    before propagating the normal calls.
+ + """ + + def __init__(self, pluginmanager): + self._pluginmanager = pluginmanager + self.calls = [] + + def before(hook_name, hook_impls, kwargs): + self.calls.append(ParsedCall(hook_name, kwargs)) + + def after(outcome, hook_name, hook_impls, kwargs): + pass + + self._undo_wrapping = pluginmanager.add_hookcall_monitoring(before, after) + + def finish_recording(self): + self._undo_wrapping() + + def getcalls(self, names): + if isinstance(names, str): + names = names.split() + return [call for call in self.calls if call._name in names] + + def assert_contains(self, entries): + __tracebackhide__ = True + i = 0 + entries = list(entries) + backlocals = sys._getframe(1).f_locals + while entries: + name, check = entries.pop(0) + for ind, call in enumerate(self.calls[i:]): + if call._name == name: + print("NAMEMATCH", name, call) + if eval(check, backlocals, call.__dict__): + print("CHECKERMATCH", repr(check), "->", call) + else: + print("NOCHECKERMATCH", repr(check), "-", call) + continue + i += ind + 1 + break + print("NONAMEMATCH", name, "with", call) + else: + pytest.fail("could not find %r check %r" % (name, check)) + + def popcall(self, name): + __tracebackhide__ = True + for i, call in enumerate(self.calls): + if call._name == name: + del self.calls[i] + return call + lines = ["could not find call %r, in:" % (name,)] + lines.extend([" %s" % x for x in self.calls]) + pytest.fail("\n".join(lines)) + + def getcall(self, name): + values = self.getcalls(name) + assert len(values) == 1, (name, values) + return values[0] + + # functionality for test reports + + def getreports(self, names="pytest_runtest_logreport pytest_collectreport"): + return [x.report for x in self.getcalls(names)] + + def matchreport( + self, + inamepart="", + names="pytest_runtest_logreport pytest_collectreport", + when=None, + ): + """return a testreport whose dotted import path matches""" + values = [] + for rep in self.getreports(names=names): + if not when and rep.when != "call" and rep.passed: + # setup/teardown passing reports - let's ignore those + continue + if when and rep.when != when: + continue + if not inamepart or inamepart in rep.nodeid.split("::"): + values.append(rep) + if not values: + raise ValueError( + "could not find test report matching %r: " + "no test reports at all!" 
% (inamepart,)
+            )
+        if len(values) > 1:
+            raise ValueError(
+                "found 2 or more testreports matching %r: %s" % (inamepart, values)
+            )
+        return values[0]
+
+    def getfailures(self, names="pytest_runtest_logreport pytest_collectreport"):
+        return [rep for rep in self.getreports(names) if rep.failed]
+
+    def getfailedcollections(self):
+        return self.getfailures("pytest_collectreport")
+
+    def listoutcomes(self):
+        passed = []
+        skipped = []
+        failed = []
+        for rep in self.getreports("pytest_collectreport pytest_runtest_logreport"):
+            if rep.passed:
+                if rep.when == "call":
+                    passed.append(rep)
+            elif rep.skipped:
+                skipped.append(rep)
+            else:
+                assert rep.failed, "Unexpected outcome: {!r}".format(rep)
+                failed.append(rep)
+        return passed, skipped, failed
+
+    def countoutcomes(self):
+        return [len(x) for x in self.listoutcomes()]
+
+    def assertoutcome(self, passed=0, skipped=0, failed=0):
+        realpassed, realskipped, realfailed = self.listoutcomes()
+        assert passed == len(realpassed)
+        assert skipped == len(realskipped)
+        assert failed == len(realfailed)
+
+    def clear(self):
+        self.calls[:] = []
+
+
+@pytest.fixture
+def linecomp(request):
+    return LineComp()
+
+
+@pytest.fixture(name="LineMatcher")
+def LineMatcher_fixture(request):
+    return LineMatcher
+
+
+@pytest.fixture
+def testdir(request, tmpdir_factory):
+    return Testdir(request, tmpdir_factory)
+
+
+@pytest.fixture
+def _sys_snapshot():
+    snappaths = SysPathsSnapshot()
+    snapmods = SysModulesSnapshot()
+    yield
+    snapmods.restore()
+    snappaths.restore()
+
+
+@pytest.fixture
+def _config_for_test():
+    from _pytest.config import get_config
+
+    config = get_config()
+    yield config
+    config._ensure_unconfigure()  # cleanup, e.g. capman closing tmpfiles.
+
+
+rex_outcome = re.compile(r"(\d+) ([\w-]+)")
+
+
+class RunResult(object):
+    """The result of running a command.
+
+    Attributes:
+
+    :ret: the return value
+    :outlines: list of lines captured from stdout
+    :errlines: list of lines captured from stderr
+    :stdout: :py:class:`LineMatcher` of stdout, use ``stdout.str()`` to
+       reconstruct stdout or the commonly used ``stdout.fnmatch_lines()``
+       method
+    :stderr: :py:class:`LineMatcher` of stderr
+    :duration: duration in seconds
+
+    """
+
+    def __init__(self, ret, outlines, errlines, duration):
+        self.ret = ret
+        self.outlines = outlines
+        self.errlines = errlines
+        self.stdout = LineMatcher(outlines)
+        self.stderr = LineMatcher(errlines)
+        self.duration = duration
+
+    def __repr__(self):
+        return (
+            "<RunResult ret=%s len(stdout.lines)=%d len(stderr.lines)=%d duration=%.2fs>"
+            % (self.ret, len(self.stdout.lines), len(self.stderr.lines), self.duration)
+        )
+
+    def parseoutcomes(self):
+        """Return a dictionary of outcomestring->num from parsing the terminal
+        output that the test process produced.
+
+        """
+        for line in reversed(self.outlines):
+            if "seconds" in line:
+                outcomes = rex_outcome.findall(line)
+                if outcomes:
+                    d = {}
+                    for num, cat in outcomes:
+                        d[cat] = int(num)
+                    return d
+        raise ValueError("Pytest terminal report not found")
+
+    def assert_outcomes(
+        self, passed=0, skipped=0, failed=0, error=0, xpassed=0, xfailed=0
+    ):
+        """Assert that the specified outcomes appear with the respective
+        numbers (0 means it didn't occur) in the text output from a test run.
+
+        """
+        d = self.parseoutcomes()
+        obtained = {
+            "passed": d.get("passed", 0),
+            "skipped": d.get("skipped", 0),
+            "failed": d.get("failed", 0),
+            "error": d.get("error", 0),
+            "xpassed": d.get("xpassed", 0),
+            "xfailed": d.get("xfailed", 0),
+        }
+        expected = {
+            "passed": passed,
+            "skipped": skipped,
+            "failed": failed,
+            "error": error,
+            "xpassed": xpassed,
+            "xfailed": xfailed,
+        }
+        assert obtained == expected
+
+
+class CwdSnapshot(object):
+    def __init__(self):
+        self.__saved = os.getcwd()
+
+    def restore(self):
+        os.chdir(self.__saved)
+
+
+class SysModulesSnapshot(object):
+    def __init__(self, preserve=None):
+        self.__preserve = preserve
+        self.__saved = dict(sys.modules)
+
+    def restore(self):
+        if self.__preserve:
+            self.__saved.update(
+                (k, m) for k, m in sys.modules.items() if self.__preserve(k)
+            )
+        sys.modules.clear()
+        sys.modules.update(self.__saved)
+
+
+class SysPathsSnapshot(object):
+    def __init__(self):
+        self.__saved = list(sys.path), list(sys.meta_path)
+
+    def restore(self):
+        sys.path[:], sys.meta_path[:] = self.__saved
+
+
+class Testdir(object):
+    """Temporary test directory with tools to test/run pytest itself.
+
+    This is based on the ``tmpdir`` fixture but provides a number of methods
+    which aid with testing pytest itself. Unless :py:meth:`chdir` is used all
+    methods will use :py:attr:`tmpdir` as their current working directory.
+
+    Attributes:
+
+    :tmpdir: The :py:class:`py.path.local` instance of the temporary directory.
+
+    :plugins: A list of plugins to use with :py:meth:`parseconfig` and
+       :py:meth:`runpytest`. Initially this is an empty list but plugins can
+       be added to the list. The type of items to add to the list depends on
+       the method using them so refer to them for details.
+
+    """
+
+    CLOSE_STDIN = object
+
+    class TimeoutExpired(Exception):
+        pass
+
+    def __init__(self, request, tmpdir_factory):
+        self.request = request
+        self._mod_collections = WeakKeyDictionary()
+        name = request.function.__name__
+        self.tmpdir = tmpdir_factory.mktemp(name, numbered=True)
+        self.test_tmproot = tmpdir_factory.mktemp("tmp-" + name, numbered=True)
+        self.plugins = []
+        self._cwd_snapshot = CwdSnapshot()
+        self._sys_path_snapshot = SysPathsSnapshot()
+        self._sys_modules_snapshot = self.__take_sys_modules_snapshot()
+        self.chdir()
+        self.request.addfinalizer(self.finalize)
+        method = self.request.config.getoption("--runpytest")
+        if method == "inprocess":
+            self._runpytest_method = self.runpytest_inprocess
+        elif method == "subprocess":
+            self._runpytest_method = self.runpytest_subprocess
+
+        mp = self.monkeypatch = MonkeyPatch()
+        mp.setenv("PYTEST_DEBUG_TEMPROOT", str(self.test_tmproot))
+        # Ensure no unexpected caching via tox.
+        mp.delenv("TOX_ENV_DIR", raising=False)
+        # Discard outer pytest options.
+        mp.delenv("PYTEST_ADDOPTS", raising=False)
+
+        # Environment (updates) for inner runs.
+        tmphome = str(self.tmpdir)
+        self._env_run_update = {"HOME": tmphome, "USERPROFILE": tmphome}
+
+    def __repr__(self):
+        return "<Testdir %r>" % (self.tmpdir,)
+
+    def __str__(self):
+        return str(self.tmpdir)
+
+    def finalize(self):
+        """Clean up global state artifacts.
+
+        Some methods modify the global interpreter state and this tries to
+        clean this up. It does not remove the temporary directory however so
+        it can be looked at after the test run has finished.
+ + """ + self._sys_modules_snapshot.restore() + self._sys_path_snapshot.restore() + self._cwd_snapshot.restore() + self.monkeypatch.undo() + + def __take_sys_modules_snapshot(self): + # some zope modules used by twisted-related tests keep internal state + # and can't be deleted; we had some trouble in the past with + # `zope.interface` for example + def preserve_module(name): + return name.startswith("zope") + + return SysModulesSnapshot(preserve=preserve_module) + + def make_hook_recorder(self, pluginmanager): + """Create a new :py:class:`HookRecorder` for a PluginManager.""" + pluginmanager.reprec = reprec = HookRecorder(pluginmanager) + self.request.addfinalizer(reprec.finish_recording) + return reprec + + def chdir(self): + """Cd into the temporary directory. + + This is done automatically upon instantiation. + + """ + self.tmpdir.chdir() + + def _makefile(self, ext, args, kwargs, encoding="utf-8"): + items = list(kwargs.items()) + + def to_text(s): + return s.decode(encoding) if isinstance(s, bytes) else six.text_type(s) + + if args: + source = u"\n".join(to_text(x) for x in args) + basename = self.request.function.__name__ + items.insert(0, (basename, source)) + + ret = None + for basename, value in items: + p = self.tmpdir.join(basename).new(ext=ext) + p.dirpath().ensure_dir() + source = Source(value) + source = u"\n".join(to_text(line) for line in source.lines) + p.write(source.strip().encode(encoding), "wb") + if ret is None: + ret = p + return ret + + def makefile(self, ext, *args, **kwargs): + r"""Create new file(s) in the testdir. + + :param str ext: The extension the file(s) should use, including the dot, e.g. `.py`. + :param list[str] args: All args will be treated as strings and joined using newlines. + The result will be written as contents to the file. The name of the + file will be based on the test function requesting this fixture. + :param kwargs: Each keyword is the name of a file, while the value of it will + be written as contents of the file. + + Examples: + + .. code-block:: python + + testdir.makefile(".txt", "line1", "line2") + + testdir.makefile(".ini", pytest="[pytest]\naddopts=-rs\n") + + """ + return self._makefile(ext, args, kwargs) + + def makeconftest(self, source): + """Write a contest.py file with 'source' as contents.""" + return self.makepyfile(conftest=source) + + def makeini(self, source): + """Write a tox.ini file with 'source' as contents.""" + return self.makefile(".ini", tox=source) + + def getinicfg(self, source): + """Return the pytest section from the tox.ini config file.""" + p = self.makeini(source) + return py.iniconfig.IniConfig(p)["pytest"] + + def makepyfile(self, *args, **kwargs): + """Shortcut for .makefile() with a .py extension.""" + return self._makefile(".py", args, kwargs) + + def maketxtfile(self, *args, **kwargs): + """Shortcut for .makefile() with a .txt extension.""" + return self._makefile(".txt", args, kwargs) + + def syspathinsert(self, path=None): + """Prepend a directory to sys.path, defaults to :py:attr:`tmpdir`. + + This is undone automatically when this object dies at the end of each + test. + """ + if path is None: + path = self.tmpdir + + self.monkeypatch.syspath_prepend(str(path)) + + def mkdir(self, name): + """Create a new (sub)directory.""" + return self.tmpdir.mkdir(name) + + def mkpydir(self, name): + """Create a new python package. + + This creates a (sub)directory with an empty ``__init__.py`` file so it + gets recognised as a python package. 
+ + """ + p = self.mkdir(name) + p.ensure("__init__.py") + return p + + def copy_example(self, name=None): + import warnings + from _pytest.warning_types import PYTESTER_COPY_EXAMPLE + + warnings.warn(PYTESTER_COPY_EXAMPLE, stacklevel=2) + example_dir = self.request.config.getini("pytester_example_dir") + if example_dir is None: + raise ValueError("pytester_example_dir is unset, can't copy examples") + example_dir = self.request.config.rootdir.join(example_dir) + + for extra_element in self.request.node.iter_markers("pytester_example_path"): + assert extra_element.args + example_dir = example_dir.join(*extra_element.args) + + if name is None: + func_name = self.request.function.__name__ + maybe_dir = example_dir / func_name + maybe_file = example_dir / (func_name + ".py") + + if maybe_dir.isdir(): + example_path = maybe_dir + elif maybe_file.isfile(): + example_path = maybe_file + else: + raise LookupError( + "{} cant be found as module or package in {}".format( + func_name, example_dir.bestrelpath(self.request.config.rootdir) + ) + ) + else: + example_path = example_dir.join(name) + + if example_path.isdir() and not example_path.join("__init__.py").isfile(): + example_path.copy(self.tmpdir) + return self.tmpdir + elif example_path.isfile(): + result = self.tmpdir.join(example_path.basename) + example_path.copy(result) + return result + else: + raise LookupError( + 'example "{}" is not found as a file or directory'.format(example_path) + ) + + Session = Session + + def getnode(self, config, arg): + """Return the collection node of a file. + + :param config: :py:class:`_pytest.config.Config` instance, see + :py:meth:`parseconfig` and :py:meth:`parseconfigure` to create the + configuration + + :param arg: a :py:class:`py.path.local` instance of the file + + """ + session = Session(config) + assert "::" not in str(arg) + p = py.path.local(arg) + config.hook.pytest_sessionstart(session=session) + res = session.perform_collect([str(p)], genitems=False)[0] + config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK) + return res + + def getpathnode(self, path): + """Return the collection node of a file. + + This is like :py:meth:`getnode` but uses :py:meth:`parseconfigure` to + create the (configured) pytest Config instance. + + :param path: a :py:class:`py.path.local` instance of the file + + """ + config = self.parseconfigure(path) + session = Session(config) + x = session.fspath.bestrelpath(path) + config.hook.pytest_sessionstart(session=session) + res = session.perform_collect([x], genitems=False)[0] + config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK) + return res + + def genitems(self, colitems): + """Generate all test items from a collection node. + + This recurses into the collection node and returns a list of all the + test items contained within. + + """ + session = colitems[0].session + result = [] + for colitem in colitems: + result.extend(session.genitems(colitem)) + return result + + def runitem(self, source): + """Run the "test_func" Item. + + The calling test instance (class containing the test method) must + provide a ``.getrunner()`` method which should return a runner which + can run the test protocol for a single item, e.g. + :py:func:`_pytest.runner.runtestprotocol`. 
+ + """ + # used from runner functional tests + item = self.getitem(source) + # the test class where we are called from wants to provide the runner + testclassinstance = self.request.instance + runner = testclassinstance.getrunner() + return runner(item) + + def inline_runsource(self, source, *cmdlineargs): + """Run a test module in process using ``pytest.main()``. + + This run writes "source" into a temporary file and runs + ``pytest.main()`` on it, returning a :py:class:`HookRecorder` instance + for the result. + + :param source: the source code of the test module + + :param cmdlineargs: any extra command line arguments to use + + :return: :py:class:`HookRecorder` instance of the result + + """ + p = self.makepyfile(source) + values = list(cmdlineargs) + [p] + return self.inline_run(*values) + + def inline_genitems(self, *args): + """Run ``pytest.main(['--collectonly'])`` in-process. + + Runs the :py:func:`pytest.main` function to run all of pytest inside + the test process itself like :py:meth:`inline_run`, but returns a + tuple of the collected items and a :py:class:`HookRecorder` instance. + + """ + rec = self.inline_run("--collect-only", *args) + items = [x.item for x in rec.getcalls("pytest_itemcollected")] + return items, rec + + def inline_run(self, *args, **kwargs): + """Run ``pytest.main()`` in-process, returning a HookRecorder. + + Runs the :py:func:`pytest.main` function to run all of pytest inside + the test process itself. This means it can return a + :py:class:`HookRecorder` instance which gives more detailed results + from that run than can be done by matching stdout/stderr from + :py:meth:`runpytest`. + + :param args: command line arguments to pass to :py:func:`pytest.main` + + :param plugins: (keyword-only) extra plugin instances the + ``pytest.main()`` instance should use + + :return: a :py:class:`HookRecorder` instance + """ + plugins = kwargs.pop("plugins", []) + no_reraise_ctrlc = kwargs.pop("no_reraise_ctrlc", None) + raise_on_kwargs(kwargs) + + finalizers = [] + try: + # Do not load user config (during runs only). + mp_run = MonkeyPatch() + for k, v in self._env_run_update.items(): + mp_run.setenv(k, v) + finalizers.append(mp_run.undo) + + # When running pytest inline any plugins active in the main test + # process are already imported. So this disables the warning which + # will trigger to say they can no longer be rewritten, which is + # fine as they have already been rewritten. + orig_warn = AssertionRewritingHook._warn_already_imported + + def revert_warn_already_imported(): + AssertionRewritingHook._warn_already_imported = orig_warn + + finalizers.append(revert_warn_already_imported) + AssertionRewritingHook._warn_already_imported = lambda *a: None + + # Any sys.module or sys.path changes done while running pytest + # inline should be reverted after the test run completes to avoid + # clashing with later inline tests run within the same pytest test, + # e.g. just because they use matching test module names. 
+ finalizers.append(self.__take_sys_modules_snapshot().restore) + finalizers.append(SysPathsSnapshot().restore) + + # Important note: + # - our tests should not leave any other references/registrations + # laying around other than possibly loaded test modules + # referenced from sys.modules, as nothing will clean those up + # automatically + + rec = [] + + class Collect(object): + def pytest_configure(x, config): + rec.append(self.make_hook_recorder(config.pluginmanager)) + + plugins.append(Collect()) + ret = pytest.main(list(args), plugins=plugins) + if len(rec) == 1: + reprec = rec.pop() + else: + + class reprec(object): + pass + + reprec.ret = ret + + # typically we reraise keyboard interrupts from the child run + # because it's our user requesting interruption of the testing + if ret == EXIT_INTERRUPTED and not no_reraise_ctrlc: + calls = reprec.getcalls("pytest_keyboard_interrupt") + if calls and calls[-1].excinfo.type == KeyboardInterrupt: + raise KeyboardInterrupt() + return reprec + finally: + for finalizer in finalizers: + finalizer() + + def runpytest_inprocess(self, *args, **kwargs): + """Return result of running pytest in-process, providing a similar + interface to what self.runpytest() provides. + """ + syspathinsert = kwargs.pop("syspathinsert", False) + + if syspathinsert: + self.syspathinsert() + now = time.time() + capture = MultiCapture(Capture=SysCapture) + capture.start_capturing() + try: + try: + reprec = self.inline_run(*args, **kwargs) + except SystemExit as e: + + class reprec(object): + ret = e.args[0] + + except Exception: + traceback.print_exc() + + class reprec(object): + ret = 3 + + finally: + out, err = capture.readouterr() + capture.stop_capturing() + sys.stdout.write(out) + sys.stderr.write(err) + + res = RunResult(reprec.ret, out.split("\n"), err.split("\n"), time.time() - now) + res.reprec = reprec + return res + + def runpytest(self, *args, **kwargs): + """Run pytest inline or in a subprocess, depending on the command line + option "--runpytest" and return a :py:class:`RunResult`. + + """ + args = self._ensure_basetemp(args) + return self._runpytest_method(*args, **kwargs) + + def _ensure_basetemp(self, args): + args = list(args) + for x in args: + if safe_str(x).startswith("--basetemp"): + break + else: + args.append("--basetemp=%s" % self.tmpdir.dirpath("basetemp")) + return args + + def parseconfig(self, *args): + """Return a new pytest Config instance from given commandline args. + + This invokes the pytest bootstrapping code in _pytest.config to create + a new :py:class:`_pytest.core.PluginManager` and call the + pytest_cmdline_parse hook to create a new + :py:class:`_pytest.config.Config` instance. + + If :py:attr:`plugins` has been populated they should be plugin modules + to be registered with the PluginManager. + + """ + args = self._ensure_basetemp(args) + + import _pytest.config + + config = _pytest.config._prepareconfig(args, self.plugins) + # we don't know what the test will do with this half-setup config + # object and thus we make sure it gets unconfigured properly in any + # case (otherwise capturing could still be active, for example) + self.request.addfinalizer(config._ensure_unconfigure) + return config + + def parseconfigure(self, *args): + """Return a new pytest configured Config instance. + + This returns a new :py:class:`_pytest.config.Config` instance like + :py:meth:`parseconfig`, but also calls the pytest_configure hook. 
+ + """ + config = self.parseconfig(*args) + config._do_configure() + self.request.addfinalizer(config._ensure_unconfigure) + return config + + def getitem(self, source, funcname="test_func"): + """Return the test item for a test function. + + This writes the source to a python file and runs pytest's collection on + the resulting module, returning the test item for the requested + function name. + + :param source: the module source + + :param funcname: the name of the test function for which to return a + test item + + """ + items = self.getitems(source) + for item in items: + if item.name == funcname: + return item + assert 0, "%r item not found in module:\n%s\nitems: %s" % ( + funcname, + source, + items, + ) + + def getitems(self, source): + """Return all test items collected from the module. + + This writes the source to a python file and runs pytest's collection on + the resulting module, returning all test items contained within. + + """ + modcol = self.getmodulecol(source) + return self.genitems([modcol]) + + def getmodulecol(self, source, configargs=(), withinit=False): + """Return the module collection node for ``source``. + + This writes ``source`` to a file using :py:meth:`makepyfile` and then + runs the pytest collection on it, returning the collection node for the + test module. + + :param source: the source code of the module to collect + + :param configargs: any extra arguments to pass to + :py:meth:`parseconfigure` + + :param withinit: whether to also write an ``__init__.py`` file to the + same directory to ensure it is a package + + """ + if isinstance(source, Path): + path = self.tmpdir.join(str(source)) + assert not withinit, "not supported for paths" + else: + kw = {self.request.function.__name__: Source(source).strip()} + path = self.makepyfile(**kw) + if withinit: + self.makepyfile(__init__="#") + self.config = config = self.parseconfigure(path, *configargs) + return self.getnode(config, path) + + def collect_by_name(self, modcol, name): + """Return the collection node for name from the module collection. + + This will search a module collection node for a collection node + matching the given name. + + :param modcol: a module collection node; see :py:meth:`getmodulecol` + + :param name: the name of the node to return + + """ + if modcol not in self._mod_collections: + self._mod_collections[modcol] = list(modcol.collect()) + for colitem in self._mod_collections[modcol]: + if colitem.name == name: + return colitem + + def popen( + self, + cmdargs, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + stdin=CLOSE_STDIN, + **kw + ): + """Invoke subprocess.Popen. + + This calls subprocess.Popen making sure the current working directory + is in the PYTHONPATH. + + You probably want to use :py:meth:`run` instead. + + """ + env = os.environ.copy() + env["PYTHONPATH"] = os.pathsep.join( + filter(None, [os.getcwd(), env.get("PYTHONPATH", "")]) + ) + env.update(self._env_run_update) + kw["env"] = env + + if stdin is Testdir.CLOSE_STDIN: + kw["stdin"] = subprocess.PIPE + elif isinstance(stdin, bytes): + kw["stdin"] = subprocess.PIPE + else: + kw["stdin"] = stdin + + popen = subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw) + if stdin is Testdir.CLOSE_STDIN: + popen.stdin.close() + elif isinstance(stdin, bytes): + popen.stdin.write(stdin) + + return popen + + def run(self, *cmdargs, **kwargs): + """Run a command with arguments. + + Run a process using subprocess.Popen saving the stdout and stderr. 
+ + :param args: the sequence of arguments to pass to `subprocess.Popen()` + :param timeout: the period in seconds after which to timeout and raise + :py:class:`Testdir.TimeoutExpired` + :param stdin: optional standard input. Bytes are being send, closing + the pipe, otherwise it is passed through to ``popen``. + Defaults to ``CLOSE_STDIN``, which translates to using a pipe + (``subprocess.PIPE``) that gets closed. + + Returns a :py:class:`RunResult`. + + """ + __tracebackhide__ = True + + timeout = kwargs.pop("timeout", None) + stdin = kwargs.pop("stdin", Testdir.CLOSE_STDIN) + raise_on_kwargs(kwargs) + + cmdargs = [ + str(arg) if isinstance(arg, py.path.local) else arg for arg in cmdargs + ] + p1 = self.tmpdir.join("stdout") + p2 = self.tmpdir.join("stderr") + print("running:", *cmdargs) + print(" in:", py.path.local()) + f1 = codecs.open(str(p1), "w", encoding="utf8") + f2 = codecs.open(str(p2), "w", encoding="utf8") + try: + now = time.time() + popen = self.popen( + cmdargs, + stdin=stdin, + stdout=f1, + stderr=f2, + close_fds=(sys.platform != "win32"), + ) + if isinstance(stdin, bytes): + popen.stdin.close() + + def handle_timeout(): + __tracebackhide__ = True + + timeout_message = ( + "{seconds} second timeout expired running:" + " {command}".format(seconds=timeout, command=cmdargs) + ) + + popen.kill() + popen.wait() + raise self.TimeoutExpired(timeout_message) + + if timeout is None: + ret = popen.wait() + elif not six.PY2: + try: + ret = popen.wait(timeout) + except subprocess.TimeoutExpired: + handle_timeout() + else: + end = time.time() + timeout + + resolution = min(0.1, timeout / 10) + + while True: + ret = popen.poll() + if ret is not None: + break + + if time.time() > end: + handle_timeout() + + time.sleep(resolution) + finally: + f1.close() + f2.close() + f1 = codecs.open(str(p1), "r", encoding="utf8") + f2 = codecs.open(str(p2), "r", encoding="utf8") + try: + out = f1.read().splitlines() + err = f2.read().splitlines() + finally: + f1.close() + f2.close() + self._dump_lines(out, sys.stdout) + self._dump_lines(err, sys.stderr) + return RunResult(ret, out, err, time.time() - now) + + def _dump_lines(self, lines, fp): + try: + for line in lines: + print(line, file=fp) + except UnicodeEncodeError: + print("couldn't print to %s because of encoding" % (fp,)) + + def _getpytestargs(self): + return sys.executable, "-mpytest" + + def runpython(self, script): + """Run a python script using sys.executable as interpreter. + + Returns a :py:class:`RunResult`. + + """ + return self.run(sys.executable, script) + + def runpython_c(self, command): + """Run python -c "command", return a :py:class:`RunResult`.""" + return self.run(sys.executable, "-c", command) + + def runpytest_subprocess(self, *args, **kwargs): + """Run pytest as a subprocess with given arguments. + + Any plugins added to the :py:attr:`plugins` list will be added using the + ``-p`` command line option. Additionally ``--basetemp`` is used to put + any temporary files and directories in a numbered directory prefixed + with "runpytest-" to not conflict with the normal numbered pytest + location for temporary files and directories. + + :param args: the sequence of arguments to pass to the pytest subprocess + :param timeout: the period in seconds after which to timeout and raise + :py:class:`Testdir.TimeoutExpired` + + Returns a :py:class:`RunResult`. 
+ """ + __tracebackhide__ = True + timeout = kwargs.pop("timeout", None) + raise_on_kwargs(kwargs) + + p = py.path.local.make_numbered_dir( + prefix="runpytest-", keep=None, rootdir=self.tmpdir + ) + args = ("--basetemp=%s" % p,) + args + plugins = [x for x in self.plugins if isinstance(x, str)] + if plugins: + args = ("-p", plugins[0]) + args + args = self._getpytestargs() + args + return self.run(*args, timeout=timeout) + + def spawn_pytest(self, string, expect_timeout=10.0): + """Run pytest using pexpect. + + This makes sure to use the right pytest and sets up the temporary + directory locations. + + The pexpect child is returned. + + """ + basetemp = self.tmpdir.mkdir("temp-pexpect") + invoke = " ".join(map(str, self._getpytestargs())) + cmd = "%s --basetemp=%s %s" % (invoke, basetemp, string) + return self.spawn(cmd, expect_timeout=expect_timeout) + + def spawn(self, cmd, expect_timeout=10.0): + """Run a command using pexpect. + + The pexpect child is returned. + + """ + pexpect = pytest.importorskip("pexpect", "3.0") + if hasattr(sys, "pypy_version_info") and "64" in platform.machine(): + pytest.skip("pypy-64 bit not supported") + if sys.platform.startswith("freebsd"): + pytest.xfail("pexpect does not work reliably on freebsd") + logfile = self.tmpdir.join("spawn.out").open("wb") + + # Do not load user config. + env = os.environ.copy() + env.update(self._env_run_update) + + child = pexpect.spawn(cmd, logfile=logfile, env=env) + self.request.addfinalizer(logfile.close) + child.timeout = expect_timeout + return child + + +def getdecoded(out): + try: + return out.decode("utf-8") + except UnicodeDecodeError: + return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % (saferepr(out),) + + +class LineComp(object): + def __init__(self): + self.stringio = py.io.TextIO() + + def assert_contains_lines(self, lines2): + """Assert that lines2 are contained (linearly) in lines1. + + Return a list of extralines found. + + """ + __tracebackhide__ = True + val = self.stringio.getvalue() + self.stringio.truncate(0) + self.stringio.seek(0) + lines1 = val.split("\n") + return LineMatcher(lines1).fnmatch_lines(lines2) + + +class LineMatcher(object): + """Flexible matching of text. + + This is a convenience class to test large texts like the output of + commands. + + The constructor takes a list of lines without their trailing newlines, i.e. + ``text.splitlines()``. + + """ + + def __init__(self, lines): + self.lines = lines + self._log_output = [] + + def str(self): + """Return the entire original text.""" + return "\n".join(self.lines) + + def _getlines(self, lines2): + if isinstance(lines2, str): + lines2 = Source(lines2) + if isinstance(lines2, Source): + lines2 = lines2.strip().lines + return lines2 + + def fnmatch_lines_random(self, lines2): + """Check lines exist in the output using in any order. + + Lines are checked using ``fnmatch.fnmatch``. The argument is a list of + lines which have to occur in the output, in any order. + + """ + self._match_lines_random(lines2, fnmatch) + + def re_match_lines_random(self, lines2): + """Check lines exist in the output using ``re.match``, in any order. + + The argument is a list of lines which have to occur in the output, in + any order. + + """ + self._match_lines_random(lines2, lambda name, pat: re.match(pat, name)) + + def _match_lines_random(self, lines2, match_func): + """Check lines exist in the output. + + The argument is a list of lines which have to occur in the output, in + any order. Each line can contain glob whildcards. 
+ + """ + lines2 = self._getlines(lines2) + for line in lines2: + for x in self.lines: + if line == x or match_func(x, line): + self._log("matched: ", repr(line)) + break + else: + self._log("line %r not found in output" % line) + raise ValueError(self._log_text) + + def get_lines_after(self, fnline): + """Return all lines following the given line in the text. + + The given line can contain glob wildcards. + + """ + for i, line in enumerate(self.lines): + if fnline == line or fnmatch(line, fnline): + return self.lines[i + 1 :] + raise ValueError("line %r not found in output" % fnline) + + def _log(self, *args): + self._log_output.append(" ".join(str(x) for x in args)) + + @property + def _log_text(self): + return "\n".join(self._log_output) + + def fnmatch_lines(self, lines2): + """Search captured text for matching lines using ``fnmatch.fnmatch``. + + The argument is a list of lines which have to match and can use glob + wildcards. If they do not match a pytest.fail() is called. The + matches and non-matches are also printed on stdout. + + """ + __tracebackhide__ = True + self._match_lines(lines2, fnmatch, "fnmatch") + + def re_match_lines(self, lines2): + """Search captured text for matching lines using ``re.match``. + + The argument is a list of lines which have to match using ``re.match``. + If they do not match a pytest.fail() is called. + + The matches and non-matches are also printed on stdout. + + """ + __tracebackhide__ = True + self._match_lines(lines2, lambda name, pat: re.match(pat, name), "re.match") + + def _match_lines(self, lines2, match_func, match_nickname): + """Underlying implementation of ``fnmatch_lines`` and ``re_match_lines``. + + :param list[str] lines2: list of string patterns to match. The actual + format depends on ``match_func`` + :param match_func: a callable ``match_func(line, pattern)`` where line + is the captured line from stdout/stderr and pattern is the matching + pattern + :param str match_nickname: the nickname for the match function that + will be logged to stdout when a match occurs + + """ + assert isinstance(lines2, Sequence) + lines2 = self._getlines(lines2) + lines1 = self.lines[:] + nextline = None + extralines = [] + __tracebackhide__ = True + for line in lines2: + nomatchprinted = False + while lines1: + nextline = lines1.pop(0) + if line == nextline: + self._log("exact match:", repr(line)) + break + elif match_func(nextline, line): + self._log("%s:" % match_nickname, repr(line)) + self._log(" with:", repr(nextline)) + break + else: + if not nomatchprinted: + self._log("nomatch:", repr(line)) + nomatchprinted = True + self._log(" and:", repr(nextline)) + extralines.append(nextline) + else: + self._log("remains unmatched: %r" % (line,)) + pytest.fail(self._log_text) diff --git a/venv/lib/python2.7/site-packages/_pytest/python.py b/venv/lib/python2.7/site-packages/_pytest/python.py new file mode 100644 index 0000000..5f1e688 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/python.py @@ -0,0 +1,1480 @@ +# -*- coding: utf-8 -*- +""" Python test discovery, setup and run of test functions. 
""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import collections +import fnmatch +import inspect +import os +import sys +import warnings +from functools import partial +from textwrap import dedent + +import py +import six + +import _pytest +from _pytest import deprecated +from _pytest import fixtures +from _pytest import nodes +from _pytest._code import filter_traceback +from _pytest.compat import ascii_escaped +from _pytest.compat import enum +from _pytest.compat import get_default_arg_names +from _pytest.compat import get_real_func +from _pytest.compat import getfslineno +from _pytest.compat import getimfunc +from _pytest.compat import getlocation +from _pytest.compat import is_generator +from _pytest.compat import isclass +from _pytest.compat import isfunction +from _pytest.compat import NOTSET +from _pytest.compat import REGEX_TYPE +from _pytest.compat import safe_getattr +from _pytest.compat import safe_isclass +from _pytest.compat import safe_str +from _pytest.compat import STRING_TYPES +from _pytest.config import hookimpl +from _pytest.main import FSHookProxy +from _pytest.mark import MARK_GEN +from _pytest.mark.structures import get_unpacked_marks +from _pytest.mark.structures import normalize_mark_list +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.pathlib import parts +from _pytest.warning_types import PytestCollectionWarning +from _pytest.warning_types import PytestUnhandledCoroutineWarning + + +def pyobj_property(name): + def get(self): + node = self.getparent(getattr(__import__("pytest"), name)) + if node is not None: + return node.obj + + doc = "python %s object this node was collected from (can be None)." % ( + name.lower(), + ) + return property(get, None, None, doc) + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group.addoption( + "--fixtures", + "--funcargs", + action="store_true", + dest="showfixtures", + default=False, + help="show available fixtures, sorted by plugin appearance " + "(fixtures with leading '_' are only shown with '-v')", + ) + group.addoption( + "--fixtures-per-test", + action="store_true", + dest="show_fixtures_per_test", + default=False, + help="show fixtures per test", + ) + parser.addini( + "python_files", + type="args", + # NOTE: default is also used in AssertionRewritingHook. 
+ default=["test_*.py", "*_test.py"], + help="glob-style file patterns for Python test module discovery", + ) + parser.addini( + "python_classes", + type="args", + default=["Test"], + help="prefixes or glob names for Python test class discovery", + ) + parser.addini( + "python_functions", + type="args", + default=["test"], + help="prefixes or glob names for Python test function and method discovery", + ) + parser.addini( + "disable_test_id_escaping_and_forfeit_all_rights_to_community_support", + type="bool", + default=False, + help="disable string escape non-ascii characters, might cause unwanted " + "side effects(use at your own risk)", + ) + + group.addoption( + "--import-mode", + default="prepend", + choices=["prepend", "append"], + dest="importmode", + help="prepend/append to sys.path when importing test modules, " + "default is to prepend.", + ) + + +def pytest_cmdline_main(config): + if config.option.showfixtures: + showfixtures(config) + return 0 + if config.option.show_fixtures_per_test: + show_fixtures_per_test(config) + return 0 + + +def pytest_generate_tests(metafunc): + # those alternative spellings are common - raise a specific error to alert + # the user + alt_spellings = ["parameterize", "parametrise", "parameterise"] + for mark_name in alt_spellings: + if metafunc.definition.get_closest_marker(mark_name): + msg = "{0} has '{1}' mark, spelling should be 'parametrize'" + fail(msg.format(metafunc.function.__name__, mark_name), pytrace=False) + for marker in metafunc.definition.iter_markers(name="parametrize"): + metafunc.parametrize(*marker.args, **marker.kwargs) + + +def pytest_configure(config): + config.addinivalue_line( + "markers", + "parametrize(argnames, argvalues): call a test function multiple " + "times passing in different arguments in turn. argvalues generally " + "needs to be a list of values if argnames specifies only one name " + "or a list of tuples of values if argnames specifies multiple names. " + "Example: @parametrize('arg1', [1,2]) would lead to two calls of the " + "decorated test function, one with arg1=1 and another with arg1=2." + "see https://docs.pytest.org/en/latest/parametrize.html for more info " + "and examples.", + ) + config.addinivalue_line( + "markers", + "usefixtures(fixturename1, fixturename2, ...): mark tests as needing " + "all of the specified fixtures. 
see " + "https://docs.pytest.org/en/latest/fixture.html#usefixtures ", + ) + + +@hookimpl(trylast=True) +def pytest_pyfunc_call(pyfuncitem): + testfunction = pyfuncitem.obj + iscoroutinefunction = getattr(inspect, "iscoroutinefunction", None) + if iscoroutinefunction is not None and iscoroutinefunction(testfunction): + msg = "Coroutine functions are not natively supported and have been skipped.\n" + msg += "You need to install a suitable plugin for your async framework, for example:\n" + msg += " - pytest-asyncio\n" + msg += " - pytest-trio\n" + msg += " - pytest-tornasync" + warnings.warn(PytestUnhandledCoroutineWarning(msg.format(pyfuncitem.nodeid))) + skip(msg="coroutine function and no async plugin installed (see warnings)") + funcargs = pyfuncitem.funcargs + testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} + testfunction(**testargs) + return True + + +def pytest_collect_file(path, parent): + ext = path.ext + if ext == ".py": + if not parent.session.isinitpath(path): + if not path_matches_patterns( + path, parent.config.getini("python_files") + ["__init__.py"] + ): + return + ihook = parent.session.gethookproxy(path) + return ihook.pytest_pycollect_makemodule(path=path, parent=parent) + + +def path_matches_patterns(path, patterns): + """Returns True if the given py.path.local matches one of the patterns in the list of globs given""" + return any(path.fnmatch(pattern) for pattern in patterns) + + +def pytest_pycollect_makemodule(path, parent): + if path.basename == "__init__.py": + return Package(path, parent) + return Module(path, parent) + + +@hookimpl(hookwrapper=True) +def pytest_pycollect_makeitem(collector, name, obj): + outcome = yield + res = outcome.get_result() + if res is not None: + return + # nothing was collected elsewhere, let's do it here + if safe_isclass(obj): + if collector.istestclass(obj, name): + outcome.force_result(Class(name, parent=collector)) + elif collector.istestfunction(obj, name): + # mock seems to store unbound methods (issue473), normalize it + obj = getattr(obj, "__func__", obj) + # We need to try and unwrap the function if it's a functools.partial + # or a funtools.wrapped. + # We musn't if it's been wrapped with mock.patch (python 2 only) + if not (isfunction(obj) or isfunction(get_real_func(obj))): + filename, lineno = getfslineno(obj) + warnings.warn_explicit( + message=PytestCollectionWarning( + "cannot collect %r because it is not a function." 
% name + ), + category=None, + filename=str(filename), + lineno=lineno + 1, + ) + elif getattr(obj, "__test__", True): + if is_generator(obj): + res = Function(name, parent=collector) + reason = deprecated.YIELD_TESTS.format(name=name) + res.add_marker(MARK_GEN.xfail(run=False, reason=reason)) + res.warn(PytestCollectionWarning(reason)) + else: + res = list(collector._genfunctions(name, obj)) + outcome.force_result(res) + + +def pytest_make_parametrize_id(config, val, argname=None): + return None + + +class PyobjContext(object): + module = pyobj_property("Module") + cls = pyobj_property("Class") + instance = pyobj_property("Instance") + + +class PyobjMixin(PyobjContext): + _ALLOW_MARKERS = True + + def __init__(self, *k, **kw): + super(PyobjMixin, self).__init__(*k, **kw) + + @property + def obj(self): + """Underlying Python object.""" + obj = getattr(self, "_obj", None) + if obj is None: + self._obj = obj = self._getobj() + # XXX evil hack + # used to avoid Instance collector marker duplication + if self._ALLOW_MARKERS: + self.own_markers.extend(get_unpacked_marks(self.obj)) + return obj + + @obj.setter + def obj(self, value): + self._obj = value + + def _getobj(self): + """Gets the underlying Python object. May be overwritten by subclasses.""" + return getattr(self.parent.obj, self.name) + + def getmodpath(self, stopatmodule=True, includemodule=False): + """ return python path relative to the containing module. """ + chain = self.listchain() + chain.reverse() + parts = [] + for node in chain: + if isinstance(node, Instance): + continue + name = node.name + if isinstance(node, Module): + name = os.path.splitext(name)[0] + if stopatmodule: + if includemodule: + parts.append(name) + break + parts.append(name) + parts.reverse() + s = ".".join(parts) + return s.replace(".[", "[") + + def reportinfo(self): + # XXX caching? + obj = self.obj + compat_co_firstlineno = getattr(obj, "compat_co_firstlineno", None) + if isinstance(compat_co_firstlineno, int): + # nose compatibility + fspath = sys.modules[obj.__module__].__file__ + if fspath.endswith(".pyc"): + fspath = fspath[:-1] + lineno = compat_co_firstlineno + else: + fspath, lineno = getfslineno(obj) + modpath = self.getmodpath() + assert isinstance(lineno, int) + return fspath, lineno, modpath + + +class PyCollector(PyobjMixin, nodes.Collector): + def funcnamefilter(self, name): + return self._matches_prefix_or_glob_option("python_functions", name) + + def isnosetest(self, obj): + """ Look for the __test__ attribute, which is applied by the + @nose.tools.istest decorator + """ + # We explicitly check for "is True" here to not mistakenly treat + # classes with a custom __getattr__ returning something truthy (like a + # function) as test classes. + return safe_getattr(obj, "__test__", False) is True + + def classnamefilter(self, name): + return self._matches_prefix_or_glob_option("python_classes", name) + + def istestfunction(self, obj, name): + if self.funcnamefilter(name) or self.isnosetest(obj): + if isinstance(obj, staticmethod): + # static methods need to be unwrapped + obj = safe_getattr(obj, "__func__", False) + return ( + safe_getattr(obj, "__call__", False) + and fixtures.getfixturemarker(obj) is None + ) + else: + return False + + def istestclass(self, obj, name): + return self.classnamefilter(name) or self.isnosetest(obj) + + def _matches_prefix_or_glob_option(self, option_name, name): + """ + checks if the given name matches the prefix or glob-pattern defined + in ini configuration. 
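+
+        For example, with the default ``python_functions = test`` a name such
+        as ``test_foo`` matches by prefix, while a configured glob pattern
+        like ``check_*`` would match ``check_foo`` via ``fnmatch``.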
+ """ + for option in self.config.getini(option_name): + if name.startswith(option): + return True + # check that name looks like a glob-string before calling fnmatch + # because this is called for every name in each collected module, + # and fnmatch is somewhat expensive to call + elif ("*" in option or "?" in option or "[" in option) and fnmatch.fnmatch( + name, option + ): + return True + return False + + def collect(self): + if not getattr(self.obj, "__test__", True): + return [] + + # NB. we avoid random getattrs and peek in the __dict__ instead + # (XXX originally introduced from a PyPy need, still true?) + dicts = [getattr(self.obj, "__dict__", {})] + for basecls in inspect.getmro(self.obj.__class__): + dicts.append(basecls.__dict__) + seen = {} + values = [] + for dic in dicts: + for name, obj in list(dic.items()): + if name in seen: + continue + seen[name] = True + res = self._makeitem(name, obj) + if res is None: + continue + if not isinstance(res, list): + res = [res] + values.extend(res) + values.sort(key=lambda item: item.reportinfo()[:2]) + return values + + def _makeitem(self, name, obj): + # assert self.ihook.fspath == self.fspath, self + return self.ihook.pytest_pycollect_makeitem(collector=self, name=name, obj=obj) + + def _genfunctions(self, name, funcobj): + module = self.getparent(Module).obj + clscol = self.getparent(Class) + cls = clscol and clscol.obj or None + fm = self.session._fixturemanager + + definition = FunctionDefinition(name=name, parent=self, callobj=funcobj) + fixtureinfo = fm.getfixtureinfo(definition, funcobj, cls) + + metafunc = Metafunc( + definition, fixtureinfo, self.config, cls=cls, module=module + ) + methods = [] + if hasattr(module, "pytest_generate_tests"): + methods.append(module.pytest_generate_tests) + if hasattr(cls, "pytest_generate_tests"): + methods.append(cls().pytest_generate_tests) + if methods: + self.ihook.pytest_generate_tests.call_extra( + methods, dict(metafunc=metafunc) + ) + else: + self.ihook.pytest_generate_tests(metafunc=metafunc) + + if not metafunc._calls: + yield Function(name, parent=self, fixtureinfo=fixtureinfo) + else: + # add funcargs() as fixturedefs to fixtureinfo.arg2fixturedefs + fixtures.add_funcarg_pseudo_fixture_def(self, metafunc, fm) + + # add_funcarg_pseudo_fixture_def may have shadowed some fixtures + # with direct parametrization, so make sure we update what the + # function really needs. + fixtureinfo.prune_dependency_tree() + + for callspec in metafunc._calls: + subname = "%s[%s]" % (name, callspec.id) + yield Function( + name=subname, + parent=self, + callspec=callspec, + callobj=funcobj, + fixtureinfo=fixtureinfo, + keywords={callspec.id: True}, + originalname=name, + ) + + +class Module(nodes.File, PyCollector): + """ Collector for test classes and functions. """ + + def _getobj(self): + return self._importtestmodule() + + def collect(self): + self._inject_setup_module_fixture() + self._inject_setup_function_fixture() + self.session._fixturemanager.parsefactories(self) + return super(Module, self).collect() + + def _inject_setup_module_fixture(self): + """Injects a hidden autouse, module scoped fixture into the collected module object + that invokes setUpModule/tearDownModule if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). 
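+
+        As a sketch (hypothetical module-level functions)::
+
+            def setup_module(module): ...
+            def teardown_module(module): ...
+
+        Both are wrapped into a single module-scoped autouse fixture that
+        calls ``setup_module`` before its ``yield`` and ``teardown_module``
+        after it.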
+ """ + setup_module = _get_non_fixture_func(self.obj, "setUpModule") + if setup_module is None: + setup_module = _get_non_fixture_func(self.obj, "setup_module") + + teardown_module = _get_non_fixture_func(self.obj, "tearDownModule") + if teardown_module is None: + teardown_module = _get_non_fixture_func(self.obj, "teardown_module") + + if setup_module is None and teardown_module is None: + return + + @fixtures.fixture(autouse=True, scope="module") + def xunit_setup_module_fixture(request): + if setup_module is not None: + _call_with_optional_argument(setup_module, request.module) + yield + if teardown_module is not None: + _call_with_optional_argument(teardown_module, request.module) + + self.obj.__pytest_setup_module = xunit_setup_module_fixture + + def _inject_setup_function_fixture(self): + """Injects a hidden autouse, function scoped fixture into the collected module object + that invokes setup_function/teardown_function if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_function = _get_non_fixture_func(self.obj, "setup_function") + teardown_function = _get_non_fixture_func(self.obj, "teardown_function") + if setup_function is None and teardown_function is None: + return + + @fixtures.fixture(autouse=True, scope="function") + def xunit_setup_function_fixture(request): + if request.instance is not None: + # in this case we are bound to an instance, so we need to let + # setup_method handle this + yield + return + if setup_function is not None: + _call_with_optional_argument(setup_function, request.function) + yield + if teardown_function is not None: + _call_with_optional_argument(teardown_function, request.function) + + self.obj.__pytest_setup_function = xunit_setup_function_fixture + + def _importtestmodule(self): + # we assume we are only called once per module + importmode = self.config.getoption("--import-mode") + try: + mod = self.fspath.pyimport(ensuresyspath=importmode) + except SyntaxError: + raise self.CollectError( + _pytest._code.ExceptionInfo.from_current().getrepr(style="short") + ) + except self.fspath.ImportMismatchError: + e = sys.exc_info()[1] + raise self.CollectError( + "import file mismatch:\n" + "imported module %r has this __file__ attribute:\n" + " %s\n" + "which is not the same as the test file we want to collect:\n" + " %s\n" + "HINT: remove __pycache__ / .pyc files and/or use a " + "unique basename for your test file modules" % e.args + ) + except ImportError: + from _pytest._code.code import ExceptionInfo + + exc_info = ExceptionInfo.from_current() + if self.config.getoption("verbose") < 2: + exc_info.traceback = exc_info.traceback.filter(filter_traceback) + exc_repr = ( + exc_info.getrepr(style="short") + if exc_info.traceback + else exc_info.exconly() + ) + formatted_tb = safe_str(exc_repr) + raise self.CollectError( + "ImportError while importing test module '{fspath}'.\n" + "Hint: make sure your test modules/packages have valid Python names.\n" + "Traceback:\n" + "{traceback}".format(fspath=self.fspath, traceback=formatted_tb) + ) + except _pytest.runner.Skipped as e: + if e.allow_module_level: + raise + raise self.CollectError( + "Using pytest.skip outside of a test is not allowed. " + "To decorate a test function, use the @pytest.mark.skip " + "or @pytest.mark.skipif decorators instead, and to skip a " + "module use `pytestmark = pytest.mark.{skip,skipif}." 
+            )
+        self.config.pluginmanager.consider_module(mod)
+        return mod
+
+
+class Package(Module):
+    def __init__(self, fspath, parent=None, config=None, session=None, nodeid=None):
+        session = parent.session
+        nodes.FSCollector.__init__(
+            self, fspath, parent=parent, config=config, session=session, nodeid=nodeid
+        )
+        self.name = fspath.dirname
+        self.trace = session.trace
+        self._norecursepatterns = session._norecursepatterns
+        self.fspath = fspath
+
+    def setup(self):
+        # not using fixtures to call setup_module here because autouse fixtures
+        # from packages are not called automatically (#4085)
+        setup_module = _get_non_fixture_func(self.obj, "setUpModule")
+        if setup_module is None:
+            setup_module = _get_non_fixture_func(self.obj, "setup_module")
+        if setup_module is not None:
+            _call_with_optional_argument(setup_module, self.obj)
+
+        teardown_module = _get_non_fixture_func(self.obj, "tearDownModule")
+        if teardown_module is None:
+            teardown_module = _get_non_fixture_func(self.obj, "teardown_module")
+        if teardown_module is not None:
+            func = partial(_call_with_optional_argument, teardown_module, self.obj)
+            self.addfinalizer(func)
+
+    def _recurse(self, dirpath):
+        if dirpath.basename == "__pycache__":
+            return False
+        ihook = self.gethookproxy(dirpath.dirpath())
+        if ihook.pytest_ignore_collect(path=dirpath, config=self.config):
+            return
+        for pat in self._norecursepatterns:
+            if dirpath.check(fnmatch=pat):
+                return False
+        ihook = self.gethookproxy(dirpath)
+        ihook.pytest_collect_directory(path=dirpath, parent=self)
+        return True
+
+    def gethookproxy(self, fspath):
+        # check if we have the common case of running
+        # hooks with all conftest.py files
+        pm = self.config.pluginmanager
+        my_conftestmodules = pm._getconftestmodules(fspath)
+        remove_mods = pm._conftest_plugins.difference(my_conftestmodules)
+        if remove_mods:
+            # one or more conftests are not in use at this fspath
+            proxy = FSHookProxy(fspath, pm, remove_mods)
+        else:
+            # all plugins are active for this fspath
+            proxy = self.config.hook
+        return proxy
+
+    def _collectfile(self, path, handle_dupes=True):
+        assert path.isfile(), "%r is not a file (isdir=%r, exists=%r, islink=%r)" % (
+            path,
+            path.isdir(),
+            path.exists(),
+            path.islink(),
+        )
+        ihook = self.gethookproxy(path)
+        if not self.isinitpath(path):
+            if ihook.pytest_ignore_collect(path=path, config=self.config):
+                return ()
+
+        if handle_dupes:
+            keepduplicates = self.config.getoption("keepduplicates")
+            if not keepduplicates:
+                duplicate_paths = self.config.pluginmanager._duplicatepaths
+                if path in duplicate_paths:
+                    return ()
+                else:
+                    duplicate_paths.add(path)
+
+        if self.fspath == path:  # __init__.py
+            return [self]
+
+        return ihook.pytest_collect_file(path=path, parent=self)
+
+    def isinitpath(self, path):
+        return path in self.session._initialpaths
+
+    def collect(self):
+        this_path = self.fspath.dirpath()
+        init_module = this_path.join("__init__.py")
+        if init_module.check(file=1) and path_matches_patterns(
+            init_module, self.config.getini("python_files")
+        ):
+            yield Module(init_module, self)
+        pkg_prefixes = set()
+        for path in this_path.visit(rec=self._recurse, bf=True, sort=True):
+            # We will visit our own __init__.py file, in which case we skip it.
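+            # Paths below an already-recorded package prefix are skipped as
+            # well; they are collected through their package, not directly.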
+ is_file = path.isfile() + if is_file: + if path.basename == "__init__.py" and path.dirpath() == this_path: + continue + + parts_ = parts(path.strpath) + if any( + pkg_prefix in parts_ and pkg_prefix.join("__init__.py") != path + for pkg_prefix in pkg_prefixes + ): + continue + + if is_file: + for x in self._collectfile(path): + yield x + elif not path.isdir(): + # Broken symlink or invalid/missing file. + continue + elif path.join("__init__.py").check(file=1): + pkg_prefixes.add(path) + + +def _get_xunit_setup_teardown(holder, attr_name, param_obj=None): + """ + Return a callable to perform xunit-style setup or teardown if + the function exists in the ``holder`` object. + The ``param_obj`` parameter is the parameter which will be passed to the function + when the callable is called without arguments, defaults to the ``holder`` object. + Return ``None`` if a suitable callable is not found. + """ + # TODO: only needed because of Package! + param_obj = param_obj if param_obj is not None else holder + result = _get_non_fixture_func(holder, attr_name) + if result is not None: + arg_count = result.__code__.co_argcount + if inspect.ismethod(result): + arg_count -= 1 + if arg_count: + return lambda: result(param_obj) + else: + return result + + +def _call_with_optional_argument(func, arg): + """Call the given function with the given argument if func accepts one argument, otherwise + calls func without arguments""" + arg_count = func.__code__.co_argcount + if inspect.ismethod(func): + arg_count -= 1 + if arg_count: + func(arg) + else: + func() + + +def _get_non_fixture_func(obj, name): + """Return the attribute from the given object to be used as a setup/teardown + xunit-style function, but only if not marked as a fixture to + avoid calling it twice. + """ + meth = getattr(obj, name, None) + if fixtures.getfixturemarker(meth) is None: + return meth + + +class Class(PyCollector): + """ Collector for test methods. """ + + def collect(self): + if not safe_getattr(self.obj, "__test__", True): + return [] + if hasinit(self.obj): + self.warn( + PytestCollectionWarning( + "cannot collect test class %r because it has a " + "__init__ constructor (from: %s)" + % (self.obj.__name__, self.parent.nodeid) + ) + ) + return [] + elif hasnew(self.obj): + self.warn( + PytestCollectionWarning( + "cannot collect test class %r because it has a " + "__new__ constructor (from: %s)" + % (self.obj.__name__, self.parent.nodeid) + ) + ) + return [] + + self._inject_setup_class_fixture() + self._inject_setup_method_fixture() + + return [Instance(name="()", parent=self)] + + def _inject_setup_class_fixture(self): + """Injects a hidden autouse, class scoped fixture into the collected class object + that invokes setup_class/teardown_class if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). 
+ """ + setup_class = _get_non_fixture_func(self.obj, "setup_class") + teardown_class = getattr(self.obj, "teardown_class", None) + if setup_class is None and teardown_class is None: + return + + @fixtures.fixture(autouse=True, scope="class") + def xunit_setup_class_fixture(cls): + if setup_class is not None: + func = getimfunc(setup_class) + _call_with_optional_argument(func, self.obj) + yield + if teardown_class is not None: + func = getimfunc(teardown_class) + _call_with_optional_argument(func, self.obj) + + self.obj.__pytest_setup_class = xunit_setup_class_fixture + + def _inject_setup_method_fixture(self): + """Injects a hidden autouse, function scoped fixture into the collected class object + that invokes setup_method/teardown_method if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_method = _get_non_fixture_func(self.obj, "setup_method") + teardown_method = getattr(self.obj, "teardown_method", None) + if setup_method is None and teardown_method is None: + return + + @fixtures.fixture(autouse=True, scope="function") + def xunit_setup_method_fixture(self, request): + method = request.function + if setup_method is not None: + func = getattr(self, "setup_method") + _call_with_optional_argument(func, method) + yield + if teardown_method is not None: + func = getattr(self, "teardown_method") + _call_with_optional_argument(func, method) + + self.obj.__pytest_setup_method = xunit_setup_method_fixture + + +class Instance(PyCollector): + _ALLOW_MARKERS = False # hack, destroy later + # instances share the object with their parents in a way + # that duplicates markers instances if not taken out + # can be removed at node structure reorganization time + + def _getobj(self): + return self.parent.obj() + + def collect(self): + self.session._fixturemanager.parsefactories(self) + return super(Instance, self).collect() + + def newinstance(self): + self.obj = self._getobj() + return self.obj + + +class FunctionMixin(PyobjMixin): + """ mixin for the code common to Function and Generator. + """ + + def setup(self): + """ perform setup for this test function. 
""" + if isinstance(self.parent, Instance): + self.parent.newinstance() + self.obj = self._getobj() + + def _prunetraceback(self, excinfo): + if hasattr(self, "_obj") and not self.config.getoption("fulltrace", False): + code = _pytest._code.Code(get_real_func(self.obj)) + path, firstlineno = code.path, code.firstlineno + traceback = excinfo.traceback + ntraceback = traceback.cut(path=path, firstlineno=firstlineno) + if ntraceback == traceback: + ntraceback = ntraceback.cut(path=path) + if ntraceback == traceback: + ntraceback = ntraceback.filter(filter_traceback) + if not ntraceback: + ntraceback = traceback + + excinfo.traceback = ntraceback.filter() + # issue364: mark all but first and last frames to + # only show a single-line message for each frame + if self.config.getoption("tbstyle", "auto") == "auto": + if len(excinfo.traceback) > 2: + for entry in excinfo.traceback[1:-1]: + entry.set_repr_style("short") + + def repr_failure(self, excinfo, outerr=None): + assert outerr is None, "XXX outerr usage is deprecated" + style = self.config.getoption("tbstyle", "auto") + if style == "auto": + style = "long" + return self._repr_failure_py(excinfo, style=style) + + +def hasinit(obj): + init = getattr(obj, "__init__", None) + if init: + return init != object.__init__ + + +def hasnew(obj): + new = getattr(obj, "__new__", None) + if new: + return new != object.__new__ + + +class CallSpec2(object): + def __init__(self, metafunc): + self.metafunc = metafunc + self.funcargs = {} + self._idlist = [] + self.params = {} + self._globalid = NOTSET + self._globalparam = NOTSET + self._arg2scopenum = {} # used for sorting parametrized resources + self.marks = [] + self.indices = {} + + def copy(self): + cs = CallSpec2(self.metafunc) + cs.funcargs.update(self.funcargs) + cs.params.update(self.params) + cs.marks.extend(self.marks) + cs.indices.update(self.indices) + cs._arg2scopenum.update(self._arg2scopenum) + cs._idlist = list(self._idlist) + cs._globalid = self._globalid + cs._globalparam = self._globalparam + return cs + + def _checkargnotcontained(self, arg): + if arg in self.params or arg in self.funcargs: + raise ValueError("duplicate %r" % (arg,)) + + def getparam(self, name): + try: + return self.params[name] + except KeyError: + if self._globalparam is NOTSET: + raise ValueError(name) + return self._globalparam + + @property + def id(self): + return "-".join(map(str, filter(None, self._idlist))) + + def setmulti2(self, valtypes, argnames, valset, id, marks, scopenum, param_index): + for arg, val in zip(argnames, valset): + self._checkargnotcontained(arg) + valtype_for_arg = valtypes[arg] + getattr(self, valtype_for_arg)[arg] = val + self.indices[arg] = param_index + self._arg2scopenum[arg] = scopenum + self._idlist.append(id) + self.marks.extend(normalize_mark_list(marks)) + + def setall(self, funcargs, id, param): + for x in funcargs: + self._checkargnotcontained(x) + self.funcargs.update(funcargs) + if id is not NOTSET: + self._idlist.append(id) + if param is not NOTSET: + assert self._globalparam is NOTSET + self._globalparam = param + for arg in funcargs: + self._arg2scopenum[arg] = fixtures.scopenum_function + + +class Metafunc(fixtures.FuncargnamesCompatAttr): + """ + Metafunc objects are passed to the :func:`pytest_generate_tests <_pytest.hookspec.pytest_generate_tests>` hook. + They help to inspect a test function and to generate tests according to + test configuration or values specified in the class or module where a + test function is defined. 
+ """ + + def __init__(self, definition, fixtureinfo, config, cls=None, module=None): + assert ( + isinstance(definition, FunctionDefinition) + or type(definition).__name__ == "DefinitionMock" + ) + self.definition = definition + + #: access to the :class:`_pytest.config.Config` object for the test session + self.config = config + + #: the module object where the test function is defined in. + self.module = module + + #: underlying python test function + self.function = definition.obj + + #: set of fixture names required by the test function + self.fixturenames = fixtureinfo.names_closure + + #: class object where the test function is defined in or ``None``. + self.cls = cls + + self._calls = [] + self._ids = set() + self._arg2fixturedefs = fixtureinfo.name2fixturedefs + + def parametrize(self, argnames, argvalues, indirect=False, ids=None, scope=None): + """ Add new invocations to the underlying test function using the list + of argvalues for the given argnames. Parametrization is performed + during the collection phase. If you need to setup expensive resources + see about setting indirect to do it rather at test setup time. + + :arg argnames: a comma-separated string denoting one or more argument + names, or a list/tuple of argument strings. + + :arg argvalues: The list of argvalues determines how often a + test is invoked with different argument values. If only one + argname was specified argvalues is a list of values. If N + argnames were specified, argvalues must be a list of N-tuples, + where each tuple-element specifies a value for its respective + argname. + + :arg indirect: The list of argnames or boolean. A list of arguments' + names (subset of argnames). If True the list contains all names from + the argnames. Each argvalue corresponding to an argname in this list will + be passed as request.param to its respective argname fixture + function so that it can perform more expensive setups during the + setup phase of a test rather than at collection time. + + :arg ids: list of string ids, or a callable. + If strings, each is corresponding to the argvalues so that they are + part of the test id. If None is given as id of specific test, the + automatically generated id for that argument will be used. + If callable, it should take one argument (a single argvalue) and return + a string or return None. If None, the automatically generated id for that + argument will be used. + If no ids are provided they will be generated automatically from + the argvalues. + + :arg scope: if specified it denotes the scope of the parameters. + The scope is used for grouping tests by parameter instances. + It will also override any fixture-function defined scope, allowing + to set a dynamic scope using test context or configuration. 
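+
+        A minimal sketch, using the decorator form of this call on a
+        hypothetical test function::
+
+            @pytest.mark.parametrize("n,expected", [(1, 2), (2, 3)])
+            def test_increment(n, expected):
+                assert n + 1 == expected
+
+        This generates the two items ``test_increment[1-2]`` and
+        ``test_increment[2-3]``.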
+ """ + from _pytest.fixtures import scope2index + from _pytest.mark import ParameterSet + + argnames, parameters = ParameterSet._for_parametrize( + argnames, + argvalues, + self.function, + self.config, + function_definition=self.definition, + ) + del argvalues + + if scope is None: + scope = _find_parametrized_scope(argnames, self._arg2fixturedefs, indirect) + + self._validate_if_using_arg_names(argnames, indirect) + + arg_values_types = self._resolve_arg_value_types(argnames, indirect) + + ids = self._resolve_arg_ids(argnames, ids, parameters, item=self.definition) + + scopenum = scope2index( + scope, descr="parametrize() call in {}".format(self.function.__name__) + ) + + # create the new calls: if we are parametrize() multiple times (by applying the decorator + # more than once) then we accumulate those calls generating the cartesian product + # of all calls + newcalls = [] + for callspec in self._calls or [CallSpec2(self)]: + for param_index, (param_id, param_set) in enumerate(zip(ids, parameters)): + newcallspec = callspec.copy() + newcallspec.setmulti2( + arg_values_types, + argnames, + param_set.values, + param_id, + param_set.marks, + scopenum, + param_index, + ) + newcalls.append(newcallspec) + self._calls = newcalls + + def _resolve_arg_ids(self, argnames, ids, parameters, item): + """Resolves the actual ids for the given argnames, based on the ``ids`` parameter given + to ``parametrize``. + + :param List[str] argnames: list of argument names passed to ``parametrize()``. + :param ids: the ids parameter of the parametrized call (see docs). + :param List[ParameterSet] parameters: the list of parameter values, same size as ``argnames``. + :param Item item: the item that generated this parametrized call. + :rtype: List[str] + :return: the list of ids for each argname given + """ + from _pytest._io.saferepr import saferepr + + idfn = None + if callable(ids): + idfn = ids + ids = None + if ids: + func_name = self.function.__name__ + if len(ids) != len(parameters): + msg = "In {}: {} parameter sets specified, with different number of ids: {}" + fail(msg.format(func_name, len(parameters), len(ids)), pytrace=False) + for id_value in ids: + if id_value is not None and not isinstance(id_value, six.string_types): + msg = "In {}: ids must be list of strings, found: {} (type: {!r})" + fail( + msg.format(func_name, saferepr(id_value), type(id_value)), + pytrace=False, + ) + ids = idmaker(argnames, parameters, idfn, ids, self.config, item=item) + return ids + + def _resolve_arg_value_types(self, argnames, indirect): + """Resolves if each parametrized argument must be considered a parameter to a fixture or a "funcarg" + to the function, based on the ``indirect`` parameter of the parametrized() call. + + :param List[str] argnames: list of argument names passed to ``parametrize()``. + :param indirect: same ``indirect`` parameter of ``parametrize()``. + :rtype: Dict[str, str] + A dict mapping each arg name to either: + * "params" if the argname should be the parameter of a fixture of the same name. + * "funcargs" if the argname should be a parameter to the parametrized test function. 
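+
+        For example, a call such as ``parametrize("db,n", ..., indirect=["db"])``
+        resolves (sketch) to ``{"db": "params", "n": "funcargs"}``.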
+ """ + valtypes = {} + if indirect is True: + valtypes = dict.fromkeys(argnames, "params") + elif indirect is False: + valtypes = dict.fromkeys(argnames, "funcargs") + elif isinstance(indirect, (tuple, list)): + valtypes = dict.fromkeys(argnames, "funcargs") + for arg in indirect: + if arg not in argnames: + fail( + "In {}: indirect fixture '{}' doesn't exist".format( + self.function.__name__, arg + ), + pytrace=False, + ) + valtypes[arg] = "params" + return valtypes + + def _validate_if_using_arg_names(self, argnames, indirect): + """ + Check if all argnames are being used, by default values, or directly/indirectly. + + :param List[str] argnames: list of argument names passed to ``parametrize()``. + :param indirect: same ``indirect`` parameter of ``parametrize()``. + :raise ValueError: if validation fails. + """ + default_arg_names = set(get_default_arg_names(self.function)) + func_name = self.function.__name__ + for arg in argnames: + if arg not in self.fixturenames: + if arg in default_arg_names: + fail( + "In {}: function already takes an argument '{}' with a default value".format( + func_name, arg + ), + pytrace=False, + ) + else: + if isinstance(indirect, (tuple, list)): + name = "fixture" if arg in indirect else "argument" + else: + name = "fixture" if indirect else "argument" + fail( + "In {}: function uses no {} '{}'".format(func_name, name, arg), + pytrace=False, + ) + + +def _find_parametrized_scope(argnames, arg2fixturedefs, indirect): + """Find the most appropriate scope for a parametrized call based on its arguments. + + When there's at least one direct argument, always use "function" scope. + + When a test function is parametrized and all its arguments are indirect + (e.g. fixtures), return the most narrow scope based on the fixtures used. + + Related to issue #1832, based on code posted by @Kingdread. 
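+
+    As a sketch: with ``indirect=True`` and two fixtures of "session" and
+    "module" scope, the narrowest used scope, "module", is returned; a single
+    direct (non-fixture) argument instead forces "function" scope.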
+ """ + from _pytest.fixtures import scopes + + if isinstance(indirect, (list, tuple)): + all_arguments_are_fixtures = len(indirect) == len(argnames) + else: + all_arguments_are_fixtures = bool(indirect) + + if all_arguments_are_fixtures: + fixturedefs = arg2fixturedefs or {} + used_scopes = [ + fixturedef[0].scope + for name, fixturedef in fixturedefs.items() + if name in argnames + ] + if used_scopes: + # Takes the most narrow scope from used fixtures + for scope in reversed(scopes): + if scope in used_scopes: + return scope + + return "function" + + +def _ascii_escaped_by_config(val, config): + if config is None: + escape_option = False + else: + escape_option = config.getini( + "disable_test_id_escaping_and_forfeit_all_rights_to_community_support" + ) + return val if escape_option else ascii_escaped(val) + + +def _idval(val, argname, idx, idfn, item, config): + if idfn: + try: + generated_id = idfn(val) + if generated_id is not None: + val = generated_id + except Exception as e: + # See issue https://github.com/pytest-dev/pytest/issues/2169 + msg = "{}: error raised while trying to determine id of parameter '{}' at position {}\n" + msg = msg.format(item.nodeid, argname, idx) + # we only append the exception type and message because on Python 2 reraise does nothing + msg += " {}: {}\n".format(type(e).__name__, e) + six.raise_from(ValueError(msg), e) + elif config: + hook_id = config.hook.pytest_make_parametrize_id( + config=config, val=val, argname=argname + ) + if hook_id: + return hook_id + + if isinstance(val, STRING_TYPES): + return _ascii_escaped_by_config(val, config) + elif val is None or isinstance(val, (float, int, bool)): + return str(val) + elif isinstance(val, REGEX_TYPE): + return ascii_escaped(val.pattern) + elif enum is not None and isinstance(val, enum.Enum): + return str(val) + elif (isclass(val) or isfunction(val)) and hasattr(val, "__name__"): + return val.__name__ + return str(argname) + str(idx) + + +def _idvalset(idx, parameterset, argnames, idfn, ids, item, config): + if parameterset.id is not None: + return parameterset.id + if ids is None or (idx >= len(ids) or ids[idx] is None): + this_id = [ + _idval(val, argname, idx, idfn, item=item, config=config) + for val, argname in zip(parameterset.values, argnames) + ] + return "-".join(this_id) + else: + return _ascii_escaped_by_config(ids[idx], config) + + +def idmaker(argnames, parametersets, idfn=None, ids=None, config=None, item=None): + ids = [ + _idvalset(valindex, parameterset, argnames, idfn, ids, config=config, item=item) + for valindex, parameterset in enumerate(parametersets) + ] + if len(set(ids)) != len(ids): + # The ids are not unique + duplicates = [testid for testid in ids if ids.count(testid) > 1] + counters = collections.defaultdict(lambda: 0) + for index, testid in enumerate(ids): + if testid in duplicates: + ids[index] = testid + str(counters[testid]) + counters[testid] += 1 + return ids + + +def show_fixtures_per_test(config): + from _pytest.main import wrap_session + + return wrap_session(config, _show_fixtures_per_test) + + +def _show_fixtures_per_test(config, session): + import _pytest.config + + session.perform_collect() + curdir = py.path.local() + tw = _pytest.config.create_terminal_writer(config) + verbose = config.getvalue("verbose") + + def get_best_relpath(func): + loc = getlocation(func, curdir) + return curdir.bestrelpath(loc) + + def write_fixture(fixture_def): + argname = fixture_def.argname + if verbose <= 0 and argname.startswith("_"): + return + if verbose > 0: + bestrel = 
get_best_relpath(fixture_def.func) + funcargspec = "{} -- {}".format(argname, bestrel) + else: + funcargspec = argname + tw.line(funcargspec, green=True) + fixture_doc = fixture_def.func.__doc__ + if fixture_doc: + write_docstring(tw, fixture_doc) + else: + tw.line(" no docstring available", red=True) + + def write_item(item): + try: + info = item._fixtureinfo + except AttributeError: + # doctests items have no _fixtureinfo attribute + return + if not info.name2fixturedefs: + # this test item does not use any fixtures + return + tw.line() + tw.sep("-", "fixtures used by {}".format(item.name)) + tw.sep("-", "({})".format(get_best_relpath(item.function))) + # dict key not used in loop but needed for sorting + for _, fixturedefs in sorted(info.name2fixturedefs.items()): + assert fixturedefs is not None + if not fixturedefs: + continue + # last item is expected to be the one used by the test item + write_fixture(fixturedefs[-1]) + + for session_item in session.items: + write_item(session_item) + + +def showfixtures(config): + from _pytest.main import wrap_session + + return wrap_session(config, _showfixtures_main) + + +def _showfixtures_main(config, session): + import _pytest.config + + session.perform_collect() + curdir = py.path.local() + tw = _pytest.config.create_terminal_writer(config) + verbose = config.getvalue("verbose") + + fm = session._fixturemanager + + available = [] + seen = set() + + for argname, fixturedefs in fm._arg2fixturedefs.items(): + assert fixturedefs is not None + if not fixturedefs: + continue + for fixturedef in fixturedefs: + loc = getlocation(fixturedef.func, curdir) + if (fixturedef.argname, loc) in seen: + continue + seen.add((fixturedef.argname, loc)) + available.append( + ( + len(fixturedef.baseid), + fixturedef.func.__module__, + curdir.bestrelpath(loc), + fixturedef.argname, + fixturedef, + ) + ) + + available.sort() + currentmodule = None + for baseid, module, bestrel, argname, fixturedef in available: + if currentmodule != module: + if not module.startswith("_pytest."): + tw.line() + tw.sep("-", "fixtures defined from %s" % (module,)) + currentmodule = module + if verbose <= 0 and argname[0] == "_": + continue + tw.write(argname, green=True) + if fixturedef.scope != "function": + tw.write(" [%s scope]" % fixturedef.scope, cyan=True) + if verbose > 0: + tw.write(" -- %s" % bestrel, yellow=True) + tw.write("\n") + loc = getlocation(fixturedef.func, curdir) + doc = fixturedef.func.__doc__ or "" + if doc: + write_docstring(tw, doc) + else: + tw.line(" %s: no docstring available" % (loc,), red=True) + tw.line() + + +def write_docstring(tw, doc, indent=" "): + doc = doc.rstrip() + if "\n" in doc: + firstline, rest = doc.split("\n", 1) + else: + firstline, rest = doc, "" + + if firstline.strip(): + tw.line(indent + firstline.strip()) + + if rest: + for line in dedent(rest).split("\n"): + tw.write(indent + line + "\n") + + +class Function(FunctionMixin, nodes.Item, fixtures.FuncargnamesCompatAttr): + """ a Function Item is responsible for setting up and executing a + Python test function. 
+ """ + + # disable since functions handle it themselves + _ALLOW_MARKERS = False + + def __init__( + self, + name, + parent, + args=None, + config=None, + callspec=None, + callobj=NOTSET, + keywords=None, + session=None, + fixtureinfo=None, + originalname=None, + ): + super(Function, self).__init__(name, parent, config=config, session=session) + self._args = args + if callobj is not NOTSET: + self.obj = callobj + + self.keywords.update(self.obj.__dict__) + self.own_markers.extend(get_unpacked_marks(self.obj)) + if callspec: + self.callspec = callspec + # this is total hostile and a mess + # keywords are broken by design by now + # this will be redeemed later + for mark in callspec.marks: + # feel free to cry, this was broken for years before + # and keywords cant fix it per design + self.keywords[mark.name] = mark + self.own_markers.extend(normalize_mark_list(callspec.marks)) + if keywords: + self.keywords.update(keywords) + + # todo: this is a hell of a hack + # https://github.com/pytest-dev/pytest/issues/4569 + + self.keywords.update( + dict.fromkeys( + [ + mark.name + for mark in self.iter_markers() + if mark.name not in self.keywords + ], + True, + ) + ) + + if fixtureinfo is None: + fixtureinfo = self.session._fixturemanager.getfixtureinfo( + self, self.obj, self.cls, funcargs=True + ) + self._fixtureinfo = fixtureinfo + self.fixturenames = fixtureinfo.names_closure + self._initrequest() + + #: original function name, without any decorations (for example + #: parametrization adds a ``"[...]"`` suffix to function names). + #: + #: .. versionadded:: 3.0 + self.originalname = originalname + + def _initrequest(self): + self.funcargs = {} + self._request = fixtures.FixtureRequest(self) + + @property + def function(self): + "underlying python 'function' object" + return getimfunc(self.obj) + + def _getobj(self): + name = self.name + i = name.find("[") # parametrization + if i != -1: + name = name[:i] + return getattr(self.parent.obj, name) + + @property + def _pyfuncitem(self): + "(compatonly) for code expecting pytest-2.2 style request objects" + return self + + def runtest(self): + """ execute the underlying test function. """ + self.ihook.pytest_pyfunc_call(pyfuncitem=self) + + def setup(self): + super(Function, self).setup() + fixtures.fillfixtures(self) + + +class FunctionDefinition(Function): + """ + internal hack until we get actual definition nodes instead of the + crappy metafunc hack + """ + + def runtest(self): + raise RuntimeError("function definitions are not supposed to be used") + + setup = runtest diff --git a/venv/lib/python2.7/site-packages/_pytest/python_api.py b/venv/lib/python2.7/site-packages/_pytest/python_api.py new file mode 100644 index 0000000..f6e475c --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/python_api.py @@ -0,0 +1,743 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import + +import math +import pprint +import sys +import warnings +from decimal import Decimal +from numbers import Number + +from more_itertools.more import always_iterable +from six.moves import filterfalse +from six.moves import zip + +import _pytest._code +from _pytest import deprecated +from _pytest.compat import isclass +from _pytest.compat import Iterable +from _pytest.compat import Mapping +from _pytest.compat import Sized +from _pytest.compat import STRING_TYPES +from _pytest.outcomes import fail + +BASE_TYPE = (type, STRING_TYPES) + + +def _cmp_raises_type_error(self, other): + """__cmp__ implementation which raises TypeError. 
Used + by Approx base classes to implement only == and != and raise a + TypeError for other comparisons. + + Needed in Python 2 only, Python 3 all it takes is not implementing the + other operators at all. + """ + __tracebackhide__ = True + raise TypeError( + "Comparison operators other than == and != not supported by approx objects" + ) + + +def _non_numeric_type_error(value, at): + at_str = " at {}".format(at) if at else "" + return TypeError( + "cannot make approximate comparisons to non-numeric values: {!r} {}".format( + value, at_str + ) + ) + + +# builtin pytest.approx helper + + +class ApproxBase(object): + """ + Provide shared utilities for making approximate comparisons between numbers + or sequences of numbers. + """ + + # Tell numpy to use our `__eq__` operator instead of its. + __array_ufunc__ = None + __array_priority__ = 100 + + def __init__(self, expected, rel=None, abs=None, nan_ok=False): + __tracebackhide__ = True + self.expected = expected + self.abs = abs + self.rel = rel + self.nan_ok = nan_ok + self._check_type() + + def __repr__(self): + raise NotImplementedError + + def __eq__(self, actual): + return all( + a == self._approx_scalar(x) for a, x in self._yield_comparisons(actual) + ) + + __hash__ = None + + def __ne__(self, actual): + return not (actual == self) + + if sys.version_info[0] == 2: + __cmp__ = _cmp_raises_type_error + + def _approx_scalar(self, x): + return ApproxScalar(x, rel=self.rel, abs=self.abs, nan_ok=self.nan_ok) + + def _yield_comparisons(self, actual): + """ + Yield all the pairs of numbers to be compared. This is used to + implement the `__eq__` method. + """ + raise NotImplementedError + + def _check_type(self): + """ + Raise a TypeError if the expected value is not a valid type. + """ + # This is only a concern if the expected value is a sequence. In every + # other case, the approx() function ensures that the expected value has + # a numeric type. For this reason, the default is to do nothing. The + # classes that deal with sequences should reimplement this method to + # raise if there are any non-numeric elements in the sequence. + pass + + +def _recursive_list_map(f, x): + if isinstance(x, list): + return list(_recursive_list_map(f, xi) for xi in x) + else: + return f(x) + + +class ApproxNumpy(ApproxBase): + """ + Perform approximate comparisons where the expected value is numpy array. + """ + + def __repr__(self): + list_scalars = _recursive_list_map(self._approx_scalar, self.expected.tolist()) + return "approx({!r})".format(list_scalars) + + if sys.version_info[0] == 2: + __cmp__ = _cmp_raises_type_error + + def __eq__(self, actual): + import numpy as np + + # self.expected is supposed to always be an array here + + if not np.isscalar(actual): + try: + actual = np.asarray(actual) + except: # noqa + raise TypeError("cannot compare '{}' to numpy.ndarray".format(actual)) + + if not np.isscalar(actual) and actual.shape != self.expected.shape: + return False + + return ApproxBase.__eq__(self, actual) + + def _yield_comparisons(self, actual): + import numpy as np + + # `actual` can either be a numpy array or a scalar, it is treated in + # `__eq__` before being passed to `ApproxBase.__eq__`, which is the + # only method that calls this one. 
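+        # A scalar is compared against every element of the expected array;
+        # otherwise the two arrays are compared elementwise (their shapes
+        # were already checked for equality in ``__eq__``).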
+ + if np.isscalar(actual): + for i in np.ndindex(self.expected.shape): + yield actual, self.expected[i].item() + else: + for i in np.ndindex(self.expected.shape): + yield actual[i].item(), self.expected[i].item() + + +class ApproxMapping(ApproxBase): + """ + Perform approximate comparisons where the expected value is a mapping with + numeric values (the keys can be anything). + """ + + def __repr__(self): + return "approx({!r})".format( + {k: self._approx_scalar(v) for k, v in self.expected.items()} + ) + + def __eq__(self, actual): + if set(actual.keys()) != set(self.expected.keys()): + return False + + return ApproxBase.__eq__(self, actual) + + def _yield_comparisons(self, actual): + for k in self.expected.keys(): + yield actual[k], self.expected[k] + + def _check_type(self): + __tracebackhide__ = True + for key, value in self.expected.items(): + if isinstance(value, type(self.expected)): + msg = "pytest.approx() does not support nested dictionaries: key={!r} value={!r}\n full mapping={}" + raise TypeError(msg.format(key, value, pprint.pformat(self.expected))) + elif not isinstance(value, Number): + raise _non_numeric_type_error(self.expected, at="key={!r}".format(key)) + + +class ApproxSequencelike(ApproxBase): + """ + Perform approximate comparisons where the expected value is a sequence of + numbers. + """ + + def __repr__(self): + seq_type = type(self.expected) + if seq_type not in (tuple, list, set): + seq_type = list + return "approx({!r})".format( + seq_type(self._approx_scalar(x) for x in self.expected) + ) + + def __eq__(self, actual): + if len(actual) != len(self.expected): + return False + return ApproxBase.__eq__(self, actual) + + def _yield_comparisons(self, actual): + return zip(actual, self.expected) + + def _check_type(self): + __tracebackhide__ = True + for index, x in enumerate(self.expected): + if isinstance(x, type(self.expected)): + msg = "pytest.approx() does not support nested data structures: {!r} at index {}\n full sequence: {}" + raise TypeError(msg.format(x, index, pprint.pformat(self.expected))) + elif not isinstance(x, Number): + raise _non_numeric_type_error( + self.expected, at="index {}".format(index) + ) + + +class ApproxScalar(ApproxBase): + """ + Perform approximate comparisons where the expected value is a single number. + """ + + DEFAULT_ABSOLUTE_TOLERANCE = 1e-12 + DEFAULT_RELATIVE_TOLERANCE = 1e-6 + + def __repr__(self): + """ + Return a string communicating both the expected value and the tolerance + for the comparison being made, e.g. '1.0 +- 1e-6'. Use the unicode + plus/minus symbol if this is python3 (it's too hard to get right for + python2). + """ + if isinstance(self.expected, complex): + return str(self.expected) + + # Infinities aren't compared using tolerances, so don't show a + # tolerance. + if math.isinf(self.expected): + return str(self.expected) + + # If a sensible tolerance can't be calculated, self.tolerance will + # raise a ValueError. In this case, display '???'. + try: + vetted_tolerance = "{:.1e}".format(self.tolerance) + except ValueError: + vetted_tolerance = "???" + + if sys.version_info[0] == 2: + return "{} +- {}".format(self.expected, vetted_tolerance) + else: + return u"{} \u00b1 {}".format(self.expected, vetted_tolerance) + + def __eq__(self, actual): + """ + Return true if the given value is equal to the expected value within + the pre-specified tolerance. + """ + if _is_numpy_array(actual): + # Call ``__eq__()`` manually to prevent infinite-recursion with + # numpy<1.13. See #3748. 
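+            # ``ndarray.flat`` iterates the array elementwise regardless of
+            # its shape, so each element is checked against the scalar
+            # tolerance on its own.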
+ return all(self.__eq__(a) for a in actual.flat) + + # Short-circuit exact equality. + if actual == self.expected: + return True + + # Allow the user to control whether NaNs are considered equal to each + # other or not. The abs() calls are for compatibility with complex + # numbers. + if math.isnan(abs(self.expected)): + return self.nan_ok and math.isnan(abs(actual)) + + # Infinity shouldn't be approximately equal to anything but itself, but + # if there's a relative tolerance, it will be infinite and infinity + # will seem approximately equal to everything. The equal-to-itself + # case would have been short circuited above, so here we can just + # return false if the expected value is infinite. The abs() call is + # for compatibility with complex numbers. + if math.isinf(abs(self.expected)): + return False + + # Return true if the two numbers are within the tolerance. + return abs(self.expected - actual) <= self.tolerance + + __hash__ = None + + @property + def tolerance(self): + """ + Return the tolerance for the comparison. This could be either an + absolute tolerance or a relative tolerance, depending on what the user + specified or which would be larger. + """ + + def set_default(x, default): + return x if x is not None else default + + # Figure out what the absolute tolerance should be. ``self.abs`` is + # either None or a value specified by the user. + absolute_tolerance = set_default(self.abs, self.DEFAULT_ABSOLUTE_TOLERANCE) + + if absolute_tolerance < 0: + raise ValueError( + "absolute tolerance can't be negative: {}".format(absolute_tolerance) + ) + if math.isnan(absolute_tolerance): + raise ValueError("absolute tolerance can't be NaN.") + + # If the user specified an absolute tolerance but not a relative one, + # just return the absolute tolerance. + if self.rel is None: + if self.abs is not None: + return absolute_tolerance + + # Figure out what the relative tolerance should be. ``self.rel`` is + # either None or a value specified by the user. This is done after + # we've made sure the user didn't ask for an absolute tolerance only, + # because we don't want to raise errors about the relative tolerance if + # we aren't even going to use it. + relative_tolerance = set_default( + self.rel, self.DEFAULT_RELATIVE_TOLERANCE + ) * abs(self.expected) + + if relative_tolerance < 0: + raise ValueError( + "relative tolerance can't be negative: {}".format(absolute_tolerance) + ) + if math.isnan(relative_tolerance): + raise ValueError("relative tolerance can't be NaN.") + + # Return the larger of the relative and absolute tolerances. + return max(relative_tolerance, absolute_tolerance) + + +class ApproxDecimal(ApproxScalar): + """ + Perform approximate comparisons where the expected value is a decimal. + """ + + DEFAULT_ABSOLUTE_TOLERANCE = Decimal("1e-12") + DEFAULT_RELATIVE_TOLERANCE = Decimal("1e-6") + + +def approx(expected, rel=None, abs=None, nan_ok=False): + """ + Assert that two numbers (or two sets of numbers) are equal to each other + within some tolerance. + + Due to the `intricacies of floating-point arithmetic`__, numbers that we + would intuitively expect to be equal are not always so:: + + >>> 0.1 + 0.2 == 0.3 + False + + __ https://docs.python.org/3/tutorial/floatingpoint.html + + This problem is commonly encountered when writing tests, e.g. when making + sure that floating-point values are what you expect them to be. 
One way to + deal with this problem is to assert that two floating-point numbers are + equal to within some appropriate tolerance:: + + >>> abs((0.1 + 0.2) - 0.3) < 1e-6 + True + + However, comparisons like this are tedious to write and difficult to + understand. Furthermore, absolute comparisons like the one above are + usually discouraged because there's no tolerance that works well for all + situations. ``1e-6`` is good for numbers around ``1``, but too small for + very big numbers and too big for very small ones. It's better to express + the tolerance as a fraction of the expected value, but relative comparisons + like that are even more difficult to write correctly and concisely. + + The ``approx`` class performs floating-point comparisons using a syntax + that's as intuitive as possible:: + + >>> from pytest import approx + >>> 0.1 + 0.2 == approx(0.3) + True + + The same syntax also works for sequences of numbers:: + + >>> (0.1 + 0.2, 0.2 + 0.4) == approx((0.3, 0.6)) + True + + Dictionary *values*:: + + >>> {'a': 0.1 + 0.2, 'b': 0.2 + 0.4} == approx({'a': 0.3, 'b': 0.6}) + True + + ``numpy`` arrays:: + + >>> import numpy as np # doctest: +SKIP + >>> np.array([0.1, 0.2]) + np.array([0.2, 0.4]) == approx(np.array([0.3, 0.6])) # doctest: +SKIP + True + + And for a ``numpy`` array against a scalar:: + + >>> import numpy as np # doctest: +SKIP + >>> np.array([0.1, 0.2]) + np.array([0.2, 0.1]) == approx(0.3) # doctest: +SKIP + True + + By default, ``approx`` considers numbers within a relative tolerance of + ``1e-6`` (i.e. one part in a million) of its expected value to be equal. + This treatment would lead to surprising results if the expected value was + ``0.0``, because nothing but ``0.0`` itself is relatively close to ``0.0``. + To handle this case less surprisingly, ``approx`` also considers numbers + within an absolute tolerance of ``1e-12`` of its expected value to be + equal. Infinity and NaN are special cases. Infinity is only considered + equal to itself, regardless of the relative tolerance. NaN is not + considered equal to anything by default, but you can make it be equal to + itself by setting the ``nan_ok`` argument to True. (This is meant to + facilitate comparing arrays that use NaN to mean "no data".) + + Both the relative and absolute tolerances can be changed by passing + arguments to the ``approx`` constructor:: + + >>> 1.0001 == approx(1) + False + >>> 1.0001 == approx(1, rel=1e-3) + True + >>> 1.0001 == approx(1, abs=1e-3) + True + + If you specify ``abs`` but not ``rel``, the comparison will not consider + the relative tolerance at all. In other words, two numbers that are within + the default relative tolerance of ``1e-6`` will still be considered unequal + if they exceed the specified absolute tolerance. If you specify both + ``abs`` and ``rel``, the numbers will be considered equal if either + tolerance is met:: + + >>> 1 + 1e-8 == approx(1) + True + >>> 1 + 1e-8 == approx(1, abs=1e-12) + False + >>> 1 + 1e-8 == approx(1, rel=1e-6, abs=1e-12) + True + + If you're thinking about using ``approx``, then you might want to know how + it compares to other good ways of comparing floating-point numbers. All of + these algorithms are based on relative and absolute tolerances and should + agree for the most part, but they do have meaningful differences: + + - ``math.isclose(a, b, rel_tol=1e-9, abs_tol=0.0)``: True if the relative + tolerance is met w.r.t. either ``a`` or ``b`` or if the absolute + tolerance is met. Because the relative tolerance is calculated w.r.t. 
+ both ``a`` and ``b``, this test is symmetric (i.e. neither ``a`` nor
+ ``b`` is a "reference value"). You have to specify an absolute tolerance
+ if you want to compare to ``0.0`` because there is no tolerance by
+ default. Only available in Python >= 3.5. `More information...`__
+
+ __ https://docs.python.org/3/library/math.html#math.isclose
+
+ - ``numpy.isclose(a, b, rtol=1e-5, atol=1e-8)``: True if the difference
+ between ``a`` and ``b`` is less than the sum of the relative tolerance
+ w.r.t. ``b`` and the absolute tolerance. Because the relative tolerance
+ is only calculated w.r.t. ``b``, this test is asymmetric and you can
+ think of ``b`` as the reference value. Support for comparing sequences
+ is provided by ``numpy.allclose``. `More information...`__
+
+ __ http://docs.scipy.org/doc/numpy-1.10.0/reference/generated/numpy.isclose.html
+
+ - ``unittest.TestCase.assertAlmostEqual(a, b)``: True if ``a`` and ``b``
+ are within an absolute tolerance of ``1e-7``. No relative tolerance is
+ considered and the absolute tolerance cannot be changed, so this function
+ is not appropriate for very large or very small numbers. Also, it's only
+ available in subclasses of ``unittest.TestCase`` and it's ugly because it
+ doesn't follow PEP8. `More information...`__
+
+ __ https://docs.python.org/3/library/unittest.html#unittest.TestCase.assertAlmostEqual
+
+ - ``a == pytest.approx(b, rel=1e-6, abs=1e-12)``: True if the relative
+ tolerance is met w.r.t. ``b`` or if the absolute tolerance is met.
+ Because the relative tolerance is only calculated w.r.t. ``b``, this test
+ is asymmetric and you can think of ``b`` as the reference value. In the
+ special case that you explicitly specify an absolute tolerance but not a
+ relative tolerance, only the absolute tolerance is considered.
+
+ .. warning::
+
+ .. versionchanged:: 3.2
+
+ In order to avoid inconsistent behavior, ``TypeError`` is
+ raised for ``>``, ``>=``, ``<`` and ``<=`` comparisons.
+ The example below illustrates the problem::
+
+ assert approx(0.1) > 0.1 + 1e-10 # calls approx(0.1).__gt__(0.1 + 1e-10)
+ assert 0.1 + 1e-10 > approx(0.1) # calls approx(0.1).__lt__(0.1 + 1e-10)
+
+ In the second example one expects ``approx(0.1).__le__(0.1 + 1e-10)``
+ to be called. But instead, ``approx(0.1).__lt__(0.1 + 1e-10)`` is used
+ for the comparison. This is because the call hierarchy of rich comparisons
+ follows a fixed behavior. `More information...`__
+
+ __ https://docs.python.org/3/reference/datamodel.html#object.__ge__
+ """
+
+ # Delegate the comparison to a class that knows how to deal with the type
+ # of the expected value (e.g. int, float, list, dict, numpy.array, etc).
+ #
+ # The primary responsibility of these classes is to implement ``__eq__()``
+ # and ``__repr__()``. The former is used to actually check if some
+ # "actual" value is equivalent to the given expected value within the
+ # allowed tolerance. The latter is used to show the user the expected
+ # value and tolerance, in the case that a test failed.
+ #
+ # The actual logic for making approximate comparisons can be found in
+ # ApproxScalar, which is used to compare individual numbers. All of the
+ # other Approx classes eventually delegate to this class. The ApproxBase
+ # class provides some convenient methods and overloads, but isn't really
+ # essential.
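(Editorial aside, not part of the patch: a minimal sketch of the dispatch described in the comment above, assuming the vendored pytest 4.x shown in this diff; the Approx class names are internal implementation details.)

    from decimal import Decimal
    from pytest import approx

    # approx() returns an instance of the Approx class matching the input type:
    assert type(approx(0.3)).__name__ == "ApproxScalar"
    assert type(approx(Decimal("0.3"))).__name__ == "ApproxDecimal"
    assert type(approx({"a": 0.3})).__name__ == "ApproxMapping"
    assert type(approx([0.3, 0.6])).__name__ == "ApproxSequencelike"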
+ + __tracebackhide__ = True + + if isinstance(expected, Decimal): + cls = ApproxDecimal + elif isinstance(expected, Number): + cls = ApproxScalar + elif isinstance(expected, Mapping): + cls = ApproxMapping + elif _is_numpy_array(expected): + cls = ApproxNumpy + elif ( + isinstance(expected, Iterable) + and isinstance(expected, Sized) + and not isinstance(expected, STRING_TYPES) + ): + cls = ApproxSequencelike + else: + raise _non_numeric_type_error(expected, at=None) + + return cls(expected, rel, abs, nan_ok) + + +def _is_numpy_array(obj): + """ + Return true if the given object is a numpy array. Make a special effort to + avoid importing numpy unless it's really necessary. + """ + import sys + + np = sys.modules.get("numpy") + if np is not None: + return isinstance(obj, np.ndarray) + return False + + +# builtin pytest.raises helper + + +def raises(expected_exception, *args, **kwargs): + r""" + Assert that a code block/function call raises ``expected_exception`` + or raise a failure exception otherwise. + + :kwparam match: if specified, a string containing a regular expression, + or a regular expression object, that is tested against the string + representation of the exception using ``re.search``. To match a literal + string that may contain `special characters`__, the pattern can + first be escaped with ``re.escape``. + + __ https://docs.python.org/3/library/re.html#regular-expression-syntax + + :kwparam message: **(deprecated since 4.1)** if specified, provides a custom failure message + if the exception is not raised. See :ref:`the deprecation docs ` for a workaround. + + .. currentmodule:: _pytest._code + + Use ``pytest.raises`` as a context manager, which will capture the exception of the given + type:: + + >>> with raises(ZeroDivisionError): + ... 1/0 + + If the code block does not raise the expected exception (``ZeroDivisionError`` in the example + above), or no exception at all, the check will fail instead. + + You can also use the keyword argument ``match`` to assert that the + exception matches a text or regex:: + + >>> with raises(ValueError, match='must be 0 or None'): + ... raise ValueError("value must be 0 or None") + + >>> with raises(ValueError, match=r'must be \d+$'): + ... raise ValueError("value must be 42") + + The context manager produces an :class:`ExceptionInfo` object which can be used to inspect the + details of the captured exception:: + + >>> with raises(ValueError) as exc_info: + ... raise ValueError("value must be 42") + >>> assert exc_info.type is ValueError + >>> assert exc_info.value.args[0] == "value must be 42" + + .. deprecated:: 4.1 + + In the context manager form you may use the keyword argument + ``message`` to specify a custom failure message that will be displayed + in case the ``pytest.raises`` check fails. This has been deprecated as it + is considered error prone as users often mean to use ``match`` instead. + See :ref:`the deprecation docs ` for a workaround. + + .. note:: + + When using ``pytest.raises`` as a context manager, it's worthwhile to + note that normal context manager rules apply and that the exception + raised *must* be the final line in the scope of the context manager. + Lines of code after that, within the scope of the context manager will + not be executed. For example:: + + >>> value = 15 + >>> with raises(ValueError) as exc_info: + ... if value > 10: + ... raise ValueError("value must be <= 10") + ... 
assert exc_info.type is ValueError # this will not execute + + Instead, the following approach must be taken (note the difference in + scope):: + + >>> with raises(ValueError) as exc_info: + ... if value > 10: + ... raise ValueError("value must be <= 10") + ... + >>> assert exc_info.type is ValueError + + **Using with** ``pytest.mark.parametrize`` + + When using :ref:`pytest.mark.parametrize ref` + it is possible to parametrize tests such that + some runs raise an exception and others do not. + + See :ref:`parametrizing_conditional_raising` for an example. + + **Legacy form** + + It is possible to specify a callable by passing a to-be-called lambda:: + + >>> raises(ZeroDivisionError, lambda: 1/0) + + + or you can specify an arbitrary callable with arguments:: + + >>> def f(x): return 1/x + ... + >>> raises(ZeroDivisionError, f, 0) + + >>> raises(ZeroDivisionError, f, x=0) + + + The form above is fully supported but discouraged for new code because the + context manager form is regarded as more readable and less error-prone. + + .. note:: + Similar to caught exception objects in Python, explicitly clearing + local references to returned ``ExceptionInfo`` objects can + help the Python interpreter speed up its garbage collection. + + Clearing those references breaks a reference cycle + (``ExceptionInfo`` --> caught exception --> frame stack raising + the exception --> current frame stack --> local variables --> + ``ExceptionInfo``) which makes Python keep all objects referenced + from that cycle (including all local variables in the current + frame) alive until the next cyclic garbage collection run. See the + official Python ``try`` statement documentation for more detailed + information. + + """ + __tracebackhide__ = True + for exc in filterfalse(isclass, always_iterable(expected_exception, BASE_TYPE)): + msg = ( + "exceptions must be old-style classes or" + " derived from BaseException, not %s" + ) + raise TypeError(msg % type(exc)) + + message = "DID NOT RAISE {}".format(expected_exception) + match_expr = None + + if not args: + if "message" in kwargs: + message = kwargs.pop("message") + warnings.warn(deprecated.RAISES_MESSAGE_PARAMETER, stacklevel=2) + if "match" in kwargs: + match_expr = kwargs.pop("match") + if kwargs: + msg = "Unexpected keyword arguments passed to pytest.raises: " + msg += ", ".join(sorted(kwargs)) + raise TypeError(msg) + return RaisesContext(expected_exception, message, match_expr) + elif isinstance(args[0], str): + warnings.warn(deprecated.RAISES_EXEC, stacklevel=2) + (code,) = args + assert isinstance(code, str) + frame = sys._getframe(1) + loc = frame.f_locals.copy() + loc.update(kwargs) + # print "raises frame scope: %r" % frame.f_locals + try: + code = _pytest._code.Source(code).compile(_genframe=frame) + exec(code, frame.f_globals, loc) + # XXX didn't mean f_globals == f_locals something special? + # this is destroyed here ... 
+ except expected_exception: + return _pytest._code.ExceptionInfo.from_current() + else: + func = args[0] + try: + func(*args[1:], **kwargs) + except expected_exception: + return _pytest._code.ExceptionInfo.from_current() + fail(message) + + +raises.Exception = fail.Exception + + +class RaisesContext(object): + def __init__(self, expected_exception, message, match_expr): + self.expected_exception = expected_exception + self.message = message + self.match_expr = match_expr + self.excinfo = None + + def __enter__(self): + self.excinfo = _pytest._code.ExceptionInfo.for_later() + return self.excinfo + + def __exit__(self, *tp): + __tracebackhide__ = True + if tp[0] is None: + fail(self.message) + self.excinfo.__init__(tp) + suppress_exception = issubclass(self.excinfo.type, self.expected_exception) + if sys.version_info[0] == 2 and suppress_exception: + sys.exc_clear() + if self.match_expr is not None and suppress_exception: + self.excinfo.match(self.match_expr) + return suppress_exception diff --git a/venv/lib/python2.7/site-packages/_pytest/recwarn.py b/venv/lib/python2.7/site-packages/_pytest/recwarn.py new file mode 100644 index 0000000..7abf2e9 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/recwarn.py @@ -0,0 +1,251 @@ +# -*- coding: utf-8 -*- +""" recording warnings during test function execution. """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import inspect +import re +import sys +import warnings + +import six + +import _pytest._code +from _pytest.deprecated import PYTEST_WARNS_UNKNOWN_KWARGS +from _pytest.deprecated import WARNS_EXEC +from _pytest.fixtures import yield_fixture +from _pytest.outcomes import fail + + +@yield_fixture +def recwarn(): + """Return a :class:`WarningsRecorder` instance that records all warnings emitted by test functions. + + See http://docs.python.org/library/warnings.html for information + on warning categories. + """ + wrec = WarningsRecorder() + with wrec: + warnings.simplefilter("default") + yield wrec + + +def deprecated_call(func=None, *args, **kwargs): + """context manager that can be used to ensure a block of code triggers a + ``DeprecationWarning`` or ``PendingDeprecationWarning``:: + + >>> import warnings + >>> def api_call_v2(): + ... warnings.warn('use v3 of this api', DeprecationWarning) + ... return 200 + + >>> with deprecated_call(): + ... assert api_call_v2() == 200 + + ``deprecated_call`` can also be used by passing a function and ``*args`` and ``*kwargs``, + in which case it will ensure calling ``func(*args, **kwargs)`` produces one of the warnings + types above. + """ + __tracebackhide__ = True + if func is not None: + args = (func,) + args + return warns((DeprecationWarning, PendingDeprecationWarning), *args, **kwargs) + + +def warns(expected_warning, *args, **kwargs): + r"""Assert that code raises a particular class of warning. + + Specifically, the parameter ``expected_warning`` can be a warning class or + sequence of warning classes, and the inside the ``with`` block must issue a warning of that class or + classes. + + This helper produces a list of :class:`warnings.WarningMessage` objects, + one for each warning raised. + + This function can be used as a context manager, or any of the other ways + ``pytest.raises`` can be used:: + + >>> with warns(RuntimeWarning): + ... 
warnings.warn("my warning", RuntimeWarning) + + In the context manager form you may use the keyword argument ``match`` to assert + that the exception matches a text or regex:: + + >>> with warns(UserWarning, match='must be 0 or None'): + ... warnings.warn("value must be 0 or None", UserWarning) + + >>> with warns(UserWarning, match=r'must be \d+$'): + ... warnings.warn("value must be 42", UserWarning) + + >>> with warns(UserWarning, match=r'must be \d+$'): + ... warnings.warn("this is not here", UserWarning) + Traceback (most recent call last): + ... + Failed: DID NOT WARN. No warnings of type ...UserWarning... was emitted... + + """ + __tracebackhide__ = True + if not args: + match_expr = kwargs.pop("match", None) + if kwargs: + warnings.warn( + PYTEST_WARNS_UNKNOWN_KWARGS.format(args=sorted(kwargs)), stacklevel=2 + ) + return WarningsChecker(expected_warning, match_expr=match_expr) + elif isinstance(args[0], str): + warnings.warn(WARNS_EXEC, stacklevel=2) + (code,) = args + assert isinstance(code, str) + frame = sys._getframe(1) + loc = frame.f_locals.copy() + loc.update(kwargs) + + with WarningsChecker(expected_warning): + code = _pytest._code.Source(code).compile() + exec(code, frame.f_globals, loc) + else: + func = args[0] + with WarningsChecker(expected_warning): + return func(*args[1:], **kwargs) + + +class WarningsRecorder(warnings.catch_warnings): + """A context manager to record raised warnings. + + Adapted from `warnings.catch_warnings`. + """ + + def __init__(self): + super(WarningsRecorder, self).__init__(record=True) + self._entered = False + self._list = [] + + @property + def list(self): + """The list of recorded warnings.""" + return self._list + + def __getitem__(self, i): + """Get a recorded warning by index.""" + return self._list[i] + + def __iter__(self): + """Iterate through the recorded warnings.""" + return iter(self._list) + + def __len__(self): + """The number of recorded warnings.""" + return len(self._list) + + def pop(self, cls=Warning): + """Pop the first recorded warning, raise exception if not exists.""" + for i, w in enumerate(self._list): + if issubclass(w.category, cls): + return self._list.pop(i) + __tracebackhide__ = True + raise AssertionError("%r not found in warning list" % cls) + + def clear(self): + """Clear the list of recorded warnings.""" + self._list[:] = [] + + def __enter__(self): + if self._entered: + __tracebackhide__ = True + raise RuntimeError("Cannot enter %r twice" % self) + self._list = super(WarningsRecorder, self).__enter__() + warnings.simplefilter("always") + # python3 keeps track of a "filter version", when the filters are + # updated previously seen warnings can be re-warned. python2 has no + # concept of this so we must reset the warnings registry manually. + # trivial patching of `warnings.warn` seems to be enough somehow? 
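(Editorial aside, not part of the patch: before the PY2-specific patching below, a minimal sketch of the recorder API defined above, assuming this vendored pytest 4.x; ``WarningsRecorder`` is an internal pytest class.)

    import warnings
    from _pytest.recwarn import WarningsRecorder  # internal pytest API

    rec = WarningsRecorder()
    with rec:
        warnings.warn("old api", DeprecationWarning)
        warnings.warn("heads up", UserWarning)
    assert len(rec) == 2                 # __len__ over the recorded warnings
    first = rec.pop(DeprecationWarning)  # first warning matching the category
    assert issubclass(first.category, DeprecationWarning)
    rec.clear()                          # empty the recorded list
    assert len(rec) == 0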
+ if six.PY2: + + def warn(message, category=None, stacklevel=1): + # duplicate the stdlib logic due to + # bad handing in the c version of warnings + if isinstance(message, Warning): + category = message.__class__ + # Check category argument + if category is None: + category = UserWarning + assert issubclass(category, Warning) + + # emulate resetting the warn registry + f_globals = sys._getframe(stacklevel).f_globals + if "__warningregistry__" in f_globals: + orig = f_globals["__warningregistry__"] + f_globals["__warningregistry__"] = None + try: + return self._saved_warn(message, category, stacklevel + 1) + finally: + f_globals["__warningregistry__"] = orig + else: + return self._saved_warn(message, category, stacklevel + 1) + + warnings.warn, self._saved_warn = warn, warnings.warn + return self + + def __exit__(self, *exc_info): + if not self._entered: + __tracebackhide__ = True + raise RuntimeError("Cannot exit %r without entering first" % self) + # see above where `self._saved_warn` is assigned + if six.PY2: + warnings.warn = self._saved_warn + super(WarningsRecorder, self).__exit__(*exc_info) + + # Built-in catch_warnings does not reset entered state so we do it + # manually here for this context manager to become reusable. + self._entered = False + + +class WarningsChecker(WarningsRecorder): + def __init__(self, expected_warning=None, match_expr=None): + super(WarningsChecker, self).__init__() + + msg = "exceptions must be old-style classes or derived from Warning, not %s" + if isinstance(expected_warning, tuple): + for exc in expected_warning: + if not inspect.isclass(exc): + raise TypeError(msg % type(exc)) + elif inspect.isclass(expected_warning): + expected_warning = (expected_warning,) + elif expected_warning is not None: + raise TypeError(msg % type(expected_warning)) + + self.expected_warning = expected_warning + self.match_expr = match_expr + + def __exit__(self, *exc_info): + super(WarningsChecker, self).__exit__(*exc_info) + + __tracebackhide__ = True + + # only check if we're not currently handling an exception + if all(a is None for a in exc_info): + if self.expected_warning is not None: + if not any(issubclass(r.category, self.expected_warning) for r in self): + __tracebackhide__ = True + fail( + "DID NOT WARN. No warnings of type {} was emitted. " + "The list of emitted warnings is: {}.".format( + self.expected_warning, [each.message for each in self] + ) + ) + elif self.match_expr is not None: + for r in self: + if issubclass(r.category, self.expected_warning): + if re.compile(self.match_expr).search(str(r.message)): + break + else: + fail( + "DID NOT WARN. No warnings of type {} matching" + " ('{}') was emitted. 
The list of emitted warnings" + " is: {}.".format( + self.expected_warning, + self.match_expr, + [each.message for each in self], + ) + ) diff --git a/venv/lib/python2.7/site-packages/_pytest/reports.py b/venv/lib/python2.7/site-packages/_pytest/reports.py new file mode 100644 index 0000000..0bba676 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/reports.py @@ -0,0 +1,435 @@ +# -*- coding: utf-8 -*- +from pprint import pprint + +import py +import six + +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import ReprEntry +from _pytest._code.code import ReprEntryNative +from _pytest._code.code import ReprExceptionInfo +from _pytest._code.code import ReprFileLocation +from _pytest._code.code import ReprFuncArgs +from _pytest._code.code import ReprLocals +from _pytest._code.code import ReprTraceback +from _pytest._code.code import TerminalRepr +from _pytest.outcomes import skip +from _pytest.pathlib import Path + + +def getslaveinfoline(node): + try: + return node._slaveinfocache + except AttributeError: + d = node.slaveinfo + ver = "%s.%s.%s" % d["version_info"][:3] + node._slaveinfocache = s = "[%s] %s -- Python %s %s" % ( + d["id"], + d["sysplatform"], + ver, + d["executable"], + ) + return s + + +class BaseReport(object): + when = None + location = None + + def __init__(self, **kw): + self.__dict__.update(kw) + + def toterminal(self, out): + if hasattr(self, "node"): + out.line(getslaveinfoline(self.node)) + + longrepr = self.longrepr + if longrepr is None: + return + + if hasattr(longrepr, "toterminal"): + longrepr.toterminal(out) + else: + try: + out.line(longrepr) + except UnicodeEncodeError: + out.line("") + + def get_sections(self, prefix): + for name, content in self.sections: + if name.startswith(prefix): + yield prefix, content + + @property + def longreprtext(self): + """ + Read-only property that returns the full string representation + of ``longrepr``. + + .. versionadded:: 3.0 + """ + tw = py.io.TerminalWriter(stringio=True) + tw.hasmarkup = False + self.toterminal(tw) + exc = tw.stringio.getvalue() + return exc.strip() + + @property + def caplog(self): + """Return captured log lines, if log capturing is enabled + + .. versionadded:: 3.5 + """ + return "\n".join( + content for (prefix, content) in self.get_sections("Captured log") + ) + + @property + def capstdout(self): + """Return captured text from stdout, if capturing is enabled + + .. versionadded:: 3.0 + """ + return "".join( + content for (prefix, content) in self.get_sections("Captured stdout") + ) + + @property + def capstderr(self): + """Return captured text from stderr, if capturing is enabled + + .. versionadded:: 3.0 + """ + return "".join( + content for (prefix, content) in self.get_sections("Captured stderr") + ) + + passed = property(lambda x: x.outcome == "passed") + failed = property(lambda x: x.outcome == "failed") + skipped = property(lambda x: x.outcome == "skipped") + + @property + def fspath(self): + return self.nodeid.split("::")[0] + + @property + def count_towards_summary(self): + """ + **Experimental** + + Returns True if this report should be counted towards the totals shown at the end of the + test session: "1 passed, 1 failure, etc". + + .. note:: + + This function is considered **experimental**, so beware that it is subject to changes + even in patch releases. 
+ """ + return True + + @property + def head_line(self): + """ + **Experimental** + + Returns the head line shown with longrepr output for this report, more commonly during + traceback representation during failures:: + + ________ Test.foo ________ + + + In the example above, the head_line is "Test.foo". + + .. note:: + + This function is considered **experimental**, so beware that it is subject to changes + even in patch releases. + """ + if self.location is not None: + fspath, lineno, domain = self.location + return domain + + def _get_verbose_word(self, config): + _category, _short, verbose = config.hook.pytest_report_teststatus( + report=self, config=config + ) + return verbose + + def _to_json(self): + """ + This was originally the serialize_report() function from xdist (ca03269). + + Returns the contents of this report as a dict of builtin entries, suitable for + serialization. + + Experimental method. + """ + + def disassembled_report(rep): + reprtraceback = rep.longrepr.reprtraceback.__dict__.copy() + reprcrash = rep.longrepr.reprcrash.__dict__.copy() + + new_entries = [] + for entry in reprtraceback["reprentries"]: + entry_data = { + "type": type(entry).__name__, + "data": entry.__dict__.copy(), + } + for key, value in entry_data["data"].items(): + if hasattr(value, "__dict__"): + entry_data["data"][key] = value.__dict__.copy() + new_entries.append(entry_data) + + reprtraceback["reprentries"] = new_entries + + return { + "reprcrash": reprcrash, + "reprtraceback": reprtraceback, + "sections": rep.longrepr.sections, + } + + d = self.__dict__.copy() + if hasattr(self.longrepr, "toterminal"): + if hasattr(self.longrepr, "reprtraceback") and hasattr( + self.longrepr, "reprcrash" + ): + d["longrepr"] = disassembled_report(self) + else: + d["longrepr"] = six.text_type(self.longrepr) + else: + d["longrepr"] = self.longrepr + for name in d: + if isinstance(d[name], (py.path.local, Path)): + d[name] = str(d[name]) + elif name == "result": + d[name] = None # for now + return d + + @classmethod + def _from_json(cls, reportdict): + """ + This was originally the serialize_report() function from xdist (ca03269). + + Factory method that returns either a TestReport or CollectReport, depending on the calling + class. It's the callers responsibility to know which class to pass here. + + Experimental method. 
+ """ + if reportdict["longrepr"]: + if ( + "reprcrash" in reportdict["longrepr"] + and "reprtraceback" in reportdict["longrepr"] + ): + + reprtraceback = reportdict["longrepr"]["reprtraceback"] + reprcrash = reportdict["longrepr"]["reprcrash"] + + unserialized_entries = [] + reprentry = None + for entry_data in reprtraceback["reprentries"]: + data = entry_data["data"] + entry_type = entry_data["type"] + if entry_type == "ReprEntry": + reprfuncargs = None + reprfileloc = None + reprlocals = None + if data["reprfuncargs"]: + reprfuncargs = ReprFuncArgs(**data["reprfuncargs"]) + if data["reprfileloc"]: + reprfileloc = ReprFileLocation(**data["reprfileloc"]) + if data["reprlocals"]: + reprlocals = ReprLocals(data["reprlocals"]["lines"]) + + reprentry = ReprEntry( + lines=data["lines"], + reprfuncargs=reprfuncargs, + reprlocals=reprlocals, + filelocrepr=reprfileloc, + style=data["style"], + ) + elif entry_type == "ReprEntryNative": + reprentry = ReprEntryNative(data["lines"]) + else: + _report_unserialization_failure(entry_type, cls, reportdict) + unserialized_entries.append(reprentry) + reprtraceback["reprentries"] = unserialized_entries + + exception_info = ReprExceptionInfo( + reprtraceback=ReprTraceback(**reprtraceback), + reprcrash=ReprFileLocation(**reprcrash), + ) + + for section in reportdict["longrepr"]["sections"]: + exception_info.addsection(*section) + reportdict["longrepr"] = exception_info + + return cls(**reportdict) + + +def _report_unserialization_failure(type_name, report_class, reportdict): + url = "https://github.com/pytest-dev/pytest/issues" + stream = py.io.TextIO() + pprint("-" * 100, stream=stream) + pprint("INTERNALERROR: Unknown entry type returned: %s" % type_name, stream=stream) + pprint("report_name: %s" % report_class, stream=stream) + pprint(reportdict, stream=stream) + pprint("Please report this bug at %s" % url, stream=stream) + pprint("-" * 100, stream=stream) + raise RuntimeError(stream.getvalue()) + + +class TestReport(BaseReport): + """ Basic test report object (also used for setup and teardown calls if + they fail). + """ + + __test__ = False + + def __init__( + self, + nodeid, + location, + keywords, + outcome, + longrepr, + when, + sections=(), + duration=0, + user_properties=None, + **extra + ): + #: normalized collection node id + self.nodeid = nodeid + + #: a (filesystempath, lineno, domaininfo) tuple indicating the + #: actual location of a test item - it might be different from the + #: collected one e.g. if a method is inherited from a different module. + self.location = location + + #: a name -> value dictionary containing all keywords and + #: markers associated with a test invocation. + self.keywords = keywords + + #: test outcome, always one of "passed", "failed", "skipped". + self.outcome = outcome + + #: None or a failure representation. + self.longrepr = longrepr + + #: one of 'setup', 'call', 'teardown' to indicate runtest phase. + self.when = when + + #: user properties is a list of tuples (name, value) that holds user + #: defined properties of the test + self.user_properties = list(user_properties or []) + + #: list of pairs ``(str, str)`` of extra information which needs to + #: marshallable. Used by pytest to add captured text + #: from ``stdout`` and ``stderr``, but may be used by other plugins + #: to add arbitrary information to reports. 
+ self.sections = list(sections)
+
+ #: time it took to run just the test
+ self.duration = duration
+
+ self.__dict__.update(extra)
+
+ def __repr__(self):
+ return "<%s %r when=%r outcome=%r>" % (
+ self.__class__.__name__,
+ self.nodeid,
+ self.when,
+ self.outcome,
+ )
+
+ @classmethod
+ def from_item_and_call(cls, item, call):
+ """
+ Factory method to create and fill a TestReport with standard item and call info.
+ """
+ when = call.when
+ duration = call.stop - call.start
+ keywords = {x: 1 for x in item.keywords}
+ excinfo = call.excinfo
+ sections = []
+ if not call.excinfo:
+ outcome = "passed"
+ longrepr = None
+ else:
+ if not isinstance(excinfo, ExceptionInfo):
+ outcome = "failed"
+ longrepr = excinfo
+ elif excinfo.errisinstance(skip.Exception):
+ outcome = "skipped"
+ r = excinfo._getreprcrash()
+ longrepr = (str(r.path), r.lineno, r.message)
+ else:
+ outcome = "failed"
+ if call.when == "call":
+ longrepr = item.repr_failure(excinfo)
+ else: # exception in setup or teardown
+ longrepr = item._repr_failure_py(
+ excinfo, style=item.config.getoption("tbstyle", "auto")
+ )
+ for rwhen, key, content in item._report_sections:
+ sections.append(("Captured %s %s" % (key, rwhen), content))
+ return cls(
+ item.nodeid,
+ item.location,
+ keywords,
+ outcome,
+ longrepr,
+ when,
+ sections,
+ duration,
+ user_properties=item.user_properties,
+ )
+
+
+class CollectReport(BaseReport):
+ when = "collect"
+
+ def __init__(self, nodeid, outcome, longrepr, result, sections=(), **extra):
+ self.nodeid = nodeid
+ self.outcome = outcome
+ self.longrepr = longrepr
+ self.result = result or []
+ self.sections = list(sections)
+ self.__dict__.update(extra)
+
+ @property
+ def location(self):
+ return (self.fspath, None, self.fspath)
+
+ def __repr__(self):
+ return "<CollectReport %r lenresult=%s outcome=%r>" % (
+ self.nodeid,
+ len(self.result),
+ self.outcome,
+ )
+
+
+class CollectErrorRepr(TerminalRepr):
+ def __init__(self, msg):
+ self.longrepr = msg
+
+ def toterminal(self, out):
+ out.line(self.longrepr, red=True)
+
+
+def pytest_report_to_serializable(report):
+ if isinstance(report, (TestReport, CollectReport)):
+ data = report._to_json()
+ data["_report_type"] = report.__class__.__name__
+ return data
+
+
+def pytest_report_from_serializable(data):
+ if "_report_type" in data:
+ if data["_report_type"] == "TestReport":
+ return TestReport._from_json(data)
+ elif data["_report_type"] == "CollectReport":
+ return CollectReport._from_json(data)
+ assert False, "Unknown report_type unserialize data: {}".format(
+ data["_report_type"]
+ )
diff --git a/venv/lib/python2.7/site-packages/_pytest/resultlog.py b/venv/lib/python2.7/site-packages/_pytest/resultlog.py
new file mode 100644
index 0000000..bd30b50
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/_pytest/resultlog.py
@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+""" log machine-parseable test session result information in a plain
+text file.
+""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os + +import py + + +def pytest_addoption(parser): + group = parser.getgroup("terminal reporting", "resultlog plugin options") + group.addoption( + "--resultlog", + "--result-log", + action="store", + metavar="path", + default=None, + help="DEPRECATED path for machine-readable result log.", + ) + + +def pytest_configure(config): + resultlog = config.option.resultlog + # prevent opening resultlog on slave nodes (xdist) + if resultlog and not hasattr(config, "slaveinput"): + dirname = os.path.dirname(os.path.abspath(resultlog)) + if not os.path.isdir(dirname): + os.makedirs(dirname) + logfile = open(resultlog, "w", 1) # line buffered + config._resultlog = ResultLog(config, logfile) + config.pluginmanager.register(config._resultlog) + + from _pytest.deprecated import RESULT_LOG + from _pytest.warnings import _issue_warning_captured + + _issue_warning_captured(RESULT_LOG, config.hook, stacklevel=2) + + +def pytest_unconfigure(config): + resultlog = getattr(config, "_resultlog", None) + if resultlog: + resultlog.logfile.close() + del config._resultlog + config.pluginmanager.unregister(resultlog) + + +class ResultLog(object): + def __init__(self, config, logfile): + self.config = config + self.logfile = logfile # preferably line buffered + + def write_log_entry(self, testpath, lettercode, longrepr): + print("%s %s" % (lettercode, testpath), file=self.logfile) + for line in longrepr.splitlines(): + print(" %s" % line, file=self.logfile) + + def log_outcome(self, report, lettercode, longrepr): + testpath = getattr(report, "nodeid", None) + if testpath is None: + testpath = report.fspath + self.write_log_entry(testpath, lettercode, longrepr) + + def pytest_runtest_logreport(self, report): + if report.when != "call" and report.passed: + return + res = self.config.hook.pytest_report_teststatus( + report=report, config=self.config + ) + code = res[1] + if code == "x": + longrepr = str(report.longrepr) + elif code == "X": + longrepr = "" + elif report.passed: + longrepr = "" + elif report.failed: + longrepr = str(report.longrepr) + elif report.skipped: + longrepr = str(report.longrepr[2]) + self.log_outcome(report, code, longrepr) + + def pytest_collectreport(self, report): + if not report.passed: + if report.failed: + code = "F" + longrepr = str(report.longrepr) + else: + assert report.skipped + code = "S" + longrepr = "%s:%d: %s" % report.longrepr + self.log_outcome(report, code, longrepr) + + def pytest_internalerror(self, excrepr): + reprcrash = getattr(excrepr, "reprcrash", None) + path = getattr(reprcrash, "path", None) + if path is None: + path = "cwd:%s" % py.path.local() + self.write_log_entry(path, "!", str(excrepr)) diff --git a/venv/lib/python2.7/site-packages/_pytest/runner.py b/venv/lib/python2.7/site-packages/_pytest/runner.py new file mode 100644 index 0000000..d51e859 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/runner.py @@ -0,0 +1,375 @@ +# -*- coding: utf-8 -*- +""" basic collect and runtest protocol implementations """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import bdb +import os +import sys +from time import time + +import attr +import six + +from .reports import CollectErrorRepr +from .reports import CollectReport +from .reports import TestReport +from _pytest._code.code import ExceptionInfo +from _pytest.outcomes import Exit +from _pytest.outcomes import Skipped 
+from _pytest.outcomes import TEST_OUTCOME + +# +# pytest plugin hooks + + +def pytest_addoption(parser): + group = parser.getgroup("terminal reporting", "reporting", after="general") + group.addoption( + "--durations", + action="store", + type=int, + default=None, + metavar="N", + help="show N slowest setup/test durations (N=0 for all).", + ), + + +def pytest_terminal_summary(terminalreporter): + durations = terminalreporter.config.option.durations + verbose = terminalreporter.config.getvalue("verbose") + if durations is None: + return + tr = terminalreporter + dlist = [] + for replist in tr.stats.values(): + for rep in replist: + if hasattr(rep, "duration"): + dlist.append(rep) + if not dlist: + return + dlist.sort(key=lambda x: x.duration) + dlist.reverse() + if not durations: + tr.write_sep("=", "slowest test durations") + else: + tr.write_sep("=", "slowest %s test durations" % durations) + dlist = dlist[:durations] + + for rep in dlist: + if verbose < 2 and rep.duration < 0.005: + tr.write_line("") + tr.write_line("(0.00 durations hidden. Use -vv to show these durations.)") + break + tr.write_line("%02.2fs %-8s %s" % (rep.duration, rep.when, rep.nodeid)) + + +def pytest_sessionstart(session): + session._setupstate = SetupState() + + +def pytest_sessionfinish(session): + session._setupstate.teardown_all() + + +def pytest_runtest_protocol(item, nextitem): + item.ihook.pytest_runtest_logstart(nodeid=item.nodeid, location=item.location) + runtestprotocol(item, nextitem=nextitem) + item.ihook.pytest_runtest_logfinish(nodeid=item.nodeid, location=item.location) + return True + + +def runtestprotocol(item, log=True, nextitem=None): + hasrequest = hasattr(item, "_request") + if hasrequest and not item._request: + item._initrequest() + rep = call_and_report(item, "setup", log) + reports = [rep] + if rep.passed: + if item.config.getoption("setupshow", False): + show_test_item(item) + if not item.config.getoption("setuponly", False): + reports.append(call_and_report(item, "call", log)) + reports.append(call_and_report(item, "teardown", log, nextitem=nextitem)) + # after all teardown hooks have been called + # want funcargs and request info to go away + if hasrequest: + item._request = False + item.funcargs = None + return reports + + +def show_test_item(item): + """Show test function, parameters and the fixtures of the test item.""" + tw = item.config.get_terminal_writer() + tw.line() + tw.write(" " * 8) + tw.write(item._nodeid) + used_fixtures = sorted(item._fixtureinfo.name2fixturedefs.keys()) + if used_fixtures: + tw.write(" (fixtures used: {})".format(", ".join(used_fixtures))) + + +def pytest_runtest_setup(item): + _update_current_test_var(item, "setup") + item.session._setupstate.prepare(item) + + +def pytest_runtest_call(item): + _update_current_test_var(item, "call") + sys.last_type, sys.last_value, sys.last_traceback = (None, None, None) + try: + item.runtest() + except Exception: + # Store trace info to allow postmortem debugging + type, value, tb = sys.exc_info() + tb = tb.tb_next # Skip *this* frame + sys.last_type = type + sys.last_value = value + sys.last_traceback = tb + del type, value, tb # Get rid of these in this frame + raise + + +def pytest_runtest_teardown(item, nextitem): + _update_current_test_var(item, "teardown") + item.session._setupstate.teardown_exact(item, nextitem) + _update_current_test_var(item, None) + + +def _update_current_test_var(item, when): + """ + Update PYTEST_CURRENT_TEST to reflect the current item and stage. 
+
+ If ``when`` is None, delete PYTEST_CURRENT_TEST from the environment.
+ """
+ var_name = "PYTEST_CURRENT_TEST"
+ if when:
+ value = "{} ({})".format(item.nodeid, when)
+ # don't allow null bytes on environment variables (see #2644, #2957)
+ value = value.replace("\x00", "(null)")
+ os.environ[var_name] = value
+ else:
+ os.environ.pop(var_name)
+
+
+def pytest_report_teststatus(report):
+ if report.when in ("setup", "teardown"):
+ if report.failed:
+ # category, shortletter, verbose-word
+ return "error", "E", "ERROR"
+ elif report.skipped:
+ return "skipped", "s", "SKIPPED"
+ else:
+ return "", "", ""
+
+
+#
+# Implementation
+
+
+def call_and_report(item, when, log=True, **kwds):
+ call = call_runtest_hook(item, when, **kwds)
+ hook = item.ihook
+ report = hook.pytest_runtest_makereport(item=item, call=call)
+ if log:
+ hook.pytest_runtest_logreport(report=report)
+ if check_interactive_exception(call, report):
+ hook.pytest_exception_interact(node=item, call=call, report=report)
+ return report
+
+
+def check_interactive_exception(call, report):
+ return call.excinfo and not (
+ hasattr(report, "wasxfail")
+ or call.excinfo.errisinstance(Skipped)
+ or call.excinfo.errisinstance(bdb.BdbQuit)
+ )
+
+
+def call_runtest_hook(item, when, **kwds):
+ hookname = "pytest_runtest_" + when
+ ihook = getattr(item.ihook, hookname)
+ reraise = (Exit,)
+ if not item.config.getoption("usepdb", False):
+ reraise += (KeyboardInterrupt,)
+ return CallInfo.from_call(
+ lambda: ihook(item=item, **kwds), when=when, reraise=reraise
+ )
+
+
+@attr.s(repr=False)
+class CallInfo(object):
+ """ Result/Exception info of a function invocation. """
+
+ _result = attr.ib()
+ # Optional[ExceptionInfo]
+ excinfo = attr.ib()
+ start = attr.ib()
+ stop = attr.ib()
+ when = attr.ib()
+
+ @property
+ def result(self):
+ if self.excinfo is not None:
+ raise AttributeError("{!r} has no valid result".format(self))
+ return self._result
+
+ @classmethod
+ def from_call(cls, func, when, reraise=None):
+ #: context of invocation: one of "setup", "call",
+ #: "teardown", "memocollect"
+ start = time()
+ excinfo = None
+ try:
+ result = func()
+ except: # noqa
+ excinfo = ExceptionInfo.from_current()
+ if reraise is not None and excinfo.errisinstance(reraise):
+ raise
+ result = None
+ stop = time()
+ return cls(start=start, stop=stop, when=when, result=result, excinfo=excinfo)
+
+ def __repr__(self):
+ if self.excinfo is not None:
+ status = "exception"
+ value = self.excinfo.value
+ else:
+ # TODO: investigate unification
+ value = repr(self._result)
+ status = "result"
+ return "<CallInfo when={when!r} {status}: {value}>".format(
+ when=self.when, value=value, status=status
+ )
+
+
+def pytest_runtest_makereport(item, call):
+ return TestReport.from_item_and_call(item, call)
+
+
+def pytest_make_collect_report(collector):
+ call = CallInfo.from_call(lambda: list(collector.collect()), "collect")
+ longrepr = None
+ if not call.excinfo:
+ outcome = "passed"
+ else:
+ from _pytest import nose
+
+ skip_exceptions = (Skipped,) + nose.get_skip_exceptions()
+ if call.excinfo.errisinstance(skip_exceptions):
+ outcome = "skipped"
+ r = collector._repr_failure_py(call.excinfo, "line").reprcrash
+ longrepr = (str(r.path), r.lineno, r.message)
+ else:
+ outcome = "failed"
+ errorinfo = collector.repr_failure(call.excinfo)
+ if not hasattr(errorinfo, "toterminal"):
+ errorinfo = CollectErrorRepr(errorinfo)
+ longrepr = errorinfo
+ rep = CollectReport(
+ collector.nodeid, outcome, longrepr, getattr(call, "result", None)
+ )
+ rep.call = call # see collect_one_node
+ return rep
+
+
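(Editorial aside, not part of the patch: a minimal sketch of how ``CallInfo.from_call`` above behaves, assuming this vendored pytest 4.x; ``CallInfo`` is an internal pytest class.)

    from _pytest.runner import CallInfo  # internal pytest API

    ok = CallInfo.from_call(lambda: 42, when="call")
    assert ok.excinfo is None and ok.result == 42

    failed = CallInfo.from_call(lambda: 1 / 0, when="call")
    assert failed.excinfo is not None
    # Accessing .result on a failed call raises AttributeError (see the
    # ``result`` property above), so report code reads .excinfo instead.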
+class SetupState(object): + """ shared state for setting up/tearing down test items or collectors. """ + + def __init__(self): + self.stack = [] + self._finalizers = {} + + def addfinalizer(self, finalizer, colitem): + """ attach a finalizer to the given colitem. + if colitem is None, this will add a finalizer that + is called at the end of teardown_all(). + """ + assert colitem and not isinstance(colitem, tuple) + assert callable(finalizer) + # assert colitem in self.stack # some unit tests don't setup stack :/ + self._finalizers.setdefault(colitem, []).append(finalizer) + + def _pop_and_teardown(self): + colitem = self.stack.pop() + self._teardown_with_finalization(colitem) + + def _callfinalizers(self, colitem): + finalizers = self._finalizers.pop(colitem, None) + exc = None + while finalizers: + fin = finalizers.pop() + try: + fin() + except TEST_OUTCOME: + # XXX Only first exception will be seen by user, + # ideally all should be reported. + if exc is None: + exc = sys.exc_info() + if exc: + six.reraise(*exc) + + def _teardown_with_finalization(self, colitem): + self._callfinalizers(colitem) + if hasattr(colitem, "teardown"): + colitem.teardown() + for colitem in self._finalizers: + assert ( + colitem is None or colitem in self.stack or isinstance(colitem, tuple) + ) + + def teardown_all(self): + while self.stack: + self._pop_and_teardown() + for key in list(self._finalizers): + self._teardown_with_finalization(key) + assert not self._finalizers + + def teardown_exact(self, item, nextitem): + needed_collectors = nextitem and nextitem.listchain() or [] + self._teardown_towards(needed_collectors) + + def _teardown_towards(self, needed_collectors): + exc = None + while self.stack: + if self.stack == needed_collectors[: len(self.stack)]: + break + try: + self._pop_and_teardown() + except TEST_OUTCOME: + # XXX Only first exception will be seen by user, + # ideally all should be reported. 
+ if exc is None: + exc = sys.exc_info() + if exc: + six.reraise(*exc) + + def prepare(self, colitem): + """ setup objects along the collector chain to the test-method + and teardown previously setup objects.""" + needed_collectors = colitem.listchain() + self._teardown_towards(needed_collectors) + + # check if the last collection node has raised an error + for col in self.stack: + if hasattr(col, "_prepare_exc"): + six.reraise(*col._prepare_exc) + for col in needed_collectors[len(self.stack) :]: + self.stack.append(col) + try: + col.setup() + except TEST_OUTCOME: + col._prepare_exc = sys.exc_info() + raise + + +def collect_one_node(collector): + ihook = collector.ihook + ihook.pytest_collectstart(collector=collector) + rep = ihook.pytest_make_collect_report(collector=collector) + call = rep.__dict__.pop("call", None) + if call and check_interactive_exception(call, rep): + ihook.pytest_exception_interact(node=collector, call=call, report=rep) + return rep diff --git a/venv/lib/python2.7/site-packages/_pytest/setuponly.py b/venv/lib/python2.7/site-packages/_pytest/setuponly.py new file mode 100644 index 0000000..0859011 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/setuponly.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import sys + +import pytest + + +def pytest_addoption(parser): + group = parser.getgroup("debugconfig") + group.addoption( + "--setuponly", + "--setup-only", + action="store_true", + help="only setup fixtures, do not execute tests.", + ) + group.addoption( + "--setupshow", + "--setup-show", + action="store_true", + help="show setup of fixtures while executing tests.", + ) + + +@pytest.hookimpl(hookwrapper=True) +def pytest_fixture_setup(fixturedef, request): + yield + config = request.config + if config.option.setupshow: + if hasattr(request, "param"): + # Save the fixture parameter so ._show_fixture_action() can + # display it now and during the teardown (in .finish()). 
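(Editorial aside, not part of the patch: the ``--setup-only``/``--setup-show`` options registered above are typically driven as below, before the parameter-formatting logic that follows; ``test_example.py`` is a hypothetical file name, and ``pytest.main`` accepts the same flags as the command line.)

    import pytest

    # Show fixture SETUP/TEARDOWN lines while the tests run:
    pytest.main(["--setup-show", "test_example.py"])
    # Only perform fixture setup, without executing any tests:
    pytest.main(["--setup-only", "test_example.py"])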
+ if fixturedef.ids: + if callable(fixturedef.ids): + fixturedef.cached_param = fixturedef.ids(request.param) + else: + fixturedef.cached_param = fixturedef.ids[request.param_index] + else: + fixturedef.cached_param = request.param + _show_fixture_action(fixturedef, "SETUP") + + +def pytest_fixture_post_finalizer(fixturedef): + if hasattr(fixturedef, "cached_result"): + config = fixturedef._fixturemanager.config + if config.option.setupshow: + _show_fixture_action(fixturedef, "TEARDOWN") + if hasattr(fixturedef, "cached_param"): + del fixturedef.cached_param + + +def _show_fixture_action(fixturedef, msg): + config = fixturedef._fixturemanager.config + capman = config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture() + out, err = capman.read_global_capture() + + tw = config.get_terminal_writer() + tw.line() + tw.write(" " * 2 * fixturedef.scopenum) + tw.write( + "{step} {scope} {fixture}".format( + step=msg.ljust(8), # align the output to TEARDOWN + scope=fixturedef.scope[0].upper(), + fixture=fixturedef.argname, + ) + ) + + if msg == "SETUP": + deps = sorted(arg for arg in fixturedef.argnames if arg != "request") + if deps: + tw.write(" (fixtures used: {})".format(", ".join(deps))) + + if hasattr(fixturedef, "cached_param"): + tw.write("[{}]".format(fixturedef.cached_param)) + + if capman: + capman.resume_global_capture() + sys.stdout.write(out) + sys.stderr.write(err) + + +@pytest.hookimpl(tryfirst=True) +def pytest_cmdline_main(config): + if config.option.setuponly: + config.option.setupshow = True diff --git a/venv/lib/python2.7/site-packages/_pytest/setupplan.py b/venv/lib/python2.7/site-packages/_pytest/setupplan.py new file mode 100644 index 0000000..47b0fe8 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/setupplan.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import pytest + + +def pytest_addoption(parser): + group = parser.getgroup("debugconfig") + group.addoption( + "--setupplan", + "--setup-plan", + action="store_true", + help="show what fixtures and tests would be executed but " + "don't execute anything.", + ) + + +@pytest.hookimpl(tryfirst=True) +def pytest_fixture_setup(fixturedef, request): + # Will return a dummy fixture if the setuponly option is provided. + if request.config.option.setupplan: + fixturedef.cached_result = (None, None, None) + return fixturedef.cached_result + + +@pytest.hookimpl(tryfirst=True) +def pytest_cmdline_main(config): + if config.option.setupplan: + config.option.setuponly = True + config.option.setupshow = True diff --git a/venv/lib/python2.7/site-packages/_pytest/skipping.py b/venv/lib/python2.7/site-packages/_pytest/skipping.py new file mode 100644 index 0000000..bc8b88e --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/skipping.py @@ -0,0 +1,186 @@ +# -*- coding: utf-8 -*- +""" support for skip/xfail functions and markers. 
""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +from _pytest.config import hookimpl +from _pytest.mark.evaluate import MarkEvaluator +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.outcomes import xfail + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group.addoption( + "--runxfail", + action="store_true", + dest="runxfail", + default=False, + help="report the results of xfail tests as if they were not marked", + ) + + parser.addini( + "xfail_strict", + "default for the strict parameter of xfail " + "markers when not given explicitly (default: False)", + default=False, + type="bool", + ) + + +def pytest_configure(config): + if config.option.runxfail: + # yay a hack + import pytest + + old = pytest.xfail + config._cleanup.append(lambda: setattr(pytest, "xfail", old)) + + def nop(*args, **kwargs): + pass + + nop.Exception = xfail.Exception + setattr(pytest, "xfail", nop) + + config.addinivalue_line( + "markers", + "skip(reason=None): skip the given test function with an optional reason. " + 'Example: skip(reason="no way of currently testing this") skips the ' + "test.", + ) + config.addinivalue_line( + "markers", + "skipif(condition): skip the given test function if eval(condition) " + "results in a True value. Evaluation happens within the " + "module global context. Example: skipif('sys.platform == \"win32\"') " + "skips the test if we are on the win32 platform. see " + "https://docs.pytest.org/en/latest/skipping.html", + ) + config.addinivalue_line( + "markers", + "xfail(condition, reason=None, run=True, raises=None, strict=False): " + "mark the test function as an expected failure if eval(condition) " + "has a True value. Optionally specify a reason for better reporting " + "and run=False if you don't even want to execute the test function. " + "If only specific exception(s) are expected, you can list them in " + "raises, and if the test fails in other ways, it will be reported as " + "a true failure. 
See https://docs.pytest.org/en/latest/skipping.html", + ) + + +@hookimpl(tryfirst=True) +def pytest_runtest_setup(item): + # Check if skip or skipif are specified as pytest marks + item._skipped_by_mark = False + eval_skipif = MarkEvaluator(item, "skipif") + if eval_skipif.istrue(): + item._skipped_by_mark = True + skip(eval_skipif.getexplanation()) + + for skip_info in item.iter_markers(name="skip"): + item._skipped_by_mark = True + if "reason" in skip_info.kwargs: + skip(skip_info.kwargs["reason"]) + elif skip_info.args: + skip(skip_info.args[0]) + else: + skip("unconditional skip") + + item._evalxfail = MarkEvaluator(item, "xfail") + check_xfail_no_run(item) + + +@hookimpl(hookwrapper=True) +def pytest_pyfunc_call(pyfuncitem): + check_xfail_no_run(pyfuncitem) + outcome = yield + passed = outcome.excinfo is None + if passed: + check_strict_xfail(pyfuncitem) + + +def check_xfail_no_run(item): + """check xfail(run=False)""" + if not item.config.option.runxfail: + evalxfail = item._evalxfail + if evalxfail.istrue(): + if not evalxfail.get("run", True): + xfail("[NOTRUN] " + evalxfail.getexplanation()) + + +def check_strict_xfail(pyfuncitem): + """check xfail(strict=True) for the given PASSING test""" + evalxfail = pyfuncitem._evalxfail + if evalxfail.istrue(): + strict_default = pyfuncitem.config.getini("xfail_strict") + is_strict_xfail = evalxfail.get("strict", strict_default) + if is_strict_xfail: + del pyfuncitem._evalxfail + explanation = evalxfail.getexplanation() + fail("[XPASS(strict)] " + explanation, pytrace=False) + + +@hookimpl(hookwrapper=True) +def pytest_runtest_makereport(item, call): + outcome = yield + rep = outcome.get_result() + evalxfail = getattr(item, "_evalxfail", None) + # unitttest special case, see setting of _unexpectedsuccess + if hasattr(item, "_unexpectedsuccess") and rep.when == "call": + from _pytest.compat import _is_unittest_unexpected_success_a_failure + + if item._unexpectedsuccess: + rep.longrepr = "Unexpected success: {}".format(item._unexpectedsuccess) + else: + rep.longrepr = "Unexpected success" + if _is_unittest_unexpected_success_a_failure(): + rep.outcome = "failed" + else: + rep.outcome = "passed" + rep.wasxfail = rep.longrepr + elif item.config.option.runxfail: + pass # don't interefere + elif call.excinfo and call.excinfo.errisinstance(xfail.Exception): + rep.wasxfail = "reason: " + call.excinfo.value.msg + rep.outcome = "skipped" + elif evalxfail and not rep.skipped and evalxfail.wasvalid() and evalxfail.istrue(): + if call.excinfo: + if evalxfail.invalidraise(call.excinfo.value): + rep.outcome = "failed" + else: + rep.outcome = "skipped" + rep.wasxfail = evalxfail.getexplanation() + elif call.when == "call": + strict_default = item.config.getini("xfail_strict") + is_strict_xfail = evalxfail.get("strict", strict_default) + explanation = evalxfail.getexplanation() + if is_strict_xfail: + rep.outcome = "failed" + rep.longrepr = "[XPASS(strict)] {}".format(explanation) + else: + rep.outcome = "passed" + rep.wasxfail = explanation + elif ( + getattr(item, "_skipped_by_mark", False) + and rep.skipped + and type(rep.longrepr) is tuple + ): + # skipped by mark.skipif; change the location of the failure + # to point to the item definition, otherwise it will display + # the location of where the skip exception was raised within pytest + filename, line, reason = rep.longrepr + filename, line = item.location[:2] + rep.longrepr = filename, line, reason + + +# called by terminalreporter progress reporting + + +def pytest_report_teststatus(report): + if 
hasattr(report, "wasxfail"): + if report.skipped: + return "xfailed", "x", "XFAIL" + elif report.passed: + return "xpassed", "X", "XPASS" diff --git a/venv/lib/python2.7/site-packages/_pytest/stepwise.py b/venv/lib/python2.7/site-packages/_pytest/stepwise.py new file mode 100644 index 0000000..8890259 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/stepwise.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +import pytest + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group.addoption( + "--sw", + "--stepwise", + action="store_true", + dest="stepwise", + help="exit on test failure and continue from last failing test next time", + ) + group.addoption( + "--stepwise-skip", + action="store_true", + dest="stepwise_skip", + help="ignore the first failing test but stop on the next failing test", + ) + + +@pytest.hookimpl +def pytest_configure(config): + config.pluginmanager.register(StepwisePlugin(config), "stepwiseplugin") + + +class StepwisePlugin: + def __init__(self, config): + self.config = config + self.active = config.getvalue("stepwise") + self.session = None + self.report_status = "" + + if self.active: + self.lastfailed = config.cache.get("cache/stepwise", None) + self.skip = config.getvalue("stepwise_skip") + + def pytest_sessionstart(self, session): + self.session = session + + def pytest_collection_modifyitems(self, session, config, items): + if not self.active: + return + if not self.lastfailed: + self.report_status = "no previously failed tests, not skipping." + return + + already_passed = [] + found = False + + # Make a list of all tests that have been run before the last failing one. + for item in items: + if item.nodeid == self.lastfailed: + found = True + break + else: + already_passed.append(item) + + # If the previously failed test was not found among the test items, + # do not skip any tests. + if not found: + self.report_status = "previously failed test not found, not skipping." + already_passed = [] + else: + self.report_status = "skipping {} already passed items.".format( + len(already_passed) + ) + + for item in already_passed: + items.remove(item) + + config.hook.pytest_deselected(items=already_passed) + + def pytest_runtest_logreport(self, report): + if not self.active: + return + + if report.failed: + if self.skip: + # Remove test from the failed ones (if it exists) and unset the skip option + # to make sure the following tests will not be skipped. + if report.nodeid == self.lastfailed: + self.lastfailed = None + + self.skip = False + else: + # Mark test as the last failing and interrupt the test session. + self.lastfailed = report.nodeid + self.session.shouldstop = ( + "Test failed, continuing from this test next run." + ) + + else: + # If the test was actually run and did pass. + if report.when == "call": + # Remove test from the failed ones, if exists. + if report.nodeid == self.lastfailed: + self.lastfailed = None + + def pytest_report_collectionfinish(self): + if self.active and self.config.getoption("verbose") >= 0 and self.report_status: + return "stepwise: %s" % self.report_status + + def pytest_sessionfinish(self, session): + if self.active: + self.config.cache.set("cache/stepwise", self.lastfailed) + else: + # Clear the list of failing tests if the plugin is not active. 
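(Editorial aside, not part of the patch: a minimal sketch of the stepwise flow implemented above, using the flags registered at the top of this plugin; ``pytest.main`` accepts the same flags as the command line.)

    import pytest

    # First run: stops at the first failing test and records its node id
    # under the "cache/stepwise" cache key.
    pytest.main(["--stepwise"])
    # Next run: already-passed tests are deselected and execution resumes
    # from the recorded failure.
    pytest.main(["--stepwise"])
    # Ignore the first failure but stop on the next one:
    pytest.main(["--stepwise", "--stepwise-skip"])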
+ self.config.cache.set("cache/stepwise", []) diff --git a/venv/lib/python2.7/site-packages/_pytest/terminal.py b/venv/lib/python2.7/site-packages/_pytest/terminal.py new file mode 100644 index 0000000..eb1970d --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/terminal.py @@ -0,0 +1,1085 @@ +# -*- coding: utf-8 -*- +""" terminal reporting of the full testing process. + +This is a good source for looking at the various reporting hooks. +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import argparse +import collections +import platform +import sys +import time +from functools import partial + +import attr +import pluggy +import py +import six +from more_itertools import collapse + +import pytest +from _pytest import nodes +from _pytest.main import EXIT_INTERRUPTED +from _pytest.main import EXIT_NOTESTSCOLLECTED +from _pytest.main import EXIT_OK +from _pytest.main import EXIT_TESTSFAILED +from _pytest.main import EXIT_USAGEERROR + +REPORT_COLLECTING_RESOLUTION = 0.5 + + +class MoreQuietAction(argparse.Action): + """ + a modified copy of the argparse count action which counts down and updates + the legacy quiet attribute at the same time + + used to unify verbosity handling + """ + + def __init__(self, option_strings, dest, default=None, required=False, help=None): + super(MoreQuietAction, self).__init__( + option_strings=option_strings, + dest=dest, + nargs=0, + default=default, + required=required, + help=help, + ) + + def __call__(self, parser, namespace, values, option_string=None): + new_count = getattr(namespace, self.dest, 0) - 1 + setattr(namespace, self.dest, new_count) + # todo Deprecate config.quiet + namespace.quiet = getattr(namespace, "quiet", 0) + 1 + + +def pytest_addoption(parser): + group = parser.getgroup("terminal reporting", "reporting", after="general") + group._addoption( + "-v", + "--verbose", + action="count", + default=0, + dest="verbose", + help="increase verbosity.", + ), + group._addoption( + "-q", + "--quiet", + action=MoreQuietAction, + default=0, + dest="verbose", + help="decrease verbosity.", + ), + group._addoption( + "--verbosity", dest="verbose", type=int, default=0, help="set verbosity" + ) + group._addoption( + "-r", + action="store", + dest="reportchars", + default="", + metavar="chars", + help="show extra test summary info as specified by chars: (f)ailed, " + "(E)rror, (s)kipped, (x)failed, (X)passed, " + "(p)assed, (P)assed with output, (a)ll except passed (p/P), or (A)ll. " + "Warnings are displayed at all times except when " + "--disable-warnings is set.", + ) + group._addoption( + "--disable-warnings", + "--disable-pytest-warnings", + default=False, + dest="disable_warnings", + action="store_true", + help="disable warnings summary", + ) + group._addoption( + "-l", + "--showlocals", + action="store_true", + dest="showlocals", + default=False, + help="show locals in tracebacks (disabled by default).", + ) + group._addoption( + "--tb", + metavar="style", + action="store", + dest="tbstyle", + default="auto", + choices=["auto", "long", "short", "no", "line", "native"], + help="traceback print mode (auto/long/short/line/native/no).", + ) + group._addoption( + "--show-capture", + action="store", + dest="showcapture", + choices=["no", "stdout", "stderr", "log", "all"], + default="all", + help="Controls how captured stdout/stderr/log is shown on failed tests. 
" + "Default is 'all'.", + ) + group._addoption( + "--fulltrace", + "--full-trace", + action="store_true", + default=False, + help="don't cut any tracebacks (default is to cut).", + ) + group._addoption( + "--color", + metavar="color", + action="store", + dest="color", + default="auto", + choices=["yes", "no", "auto"], + help="color terminal output (yes/no/auto).", + ) + + parser.addini( + "console_output_style", + help='console output: "classic", or with additional progress information ("progress" (percentage) | "count").', + default="progress", + ) + + +def pytest_configure(config): + reporter = TerminalReporter(config, sys.stdout) + config.pluginmanager.register(reporter, "terminalreporter") + if config.option.debug or config.option.traceconfig: + + def mywriter(tags, args): + msg = " ".join(map(str, args)) + reporter.write_line("[traceconfig] " + msg) + + config.trace.root.setprocessor("pytest:config", mywriter) + + +def getreportopt(config): + reportopts = "" + reportchars = config.option.reportchars + if not config.option.disable_warnings and "w" not in reportchars: + reportchars += "w" + elif config.option.disable_warnings and "w" in reportchars: + reportchars = reportchars.replace("w", "") + for char in reportchars: + if char == "a": + reportopts = "sxXwEf" + elif char == "A": + reportopts = "PpsxXwEf" + break + elif char not in reportopts: + reportopts += char + return reportopts + + +@pytest.hookimpl(trylast=True) # after _pytest.runner +def pytest_report_teststatus(report): + if report.passed: + letter = "." + elif report.skipped: + letter = "s" + elif report.failed: + letter = "F" + if report.when != "call": + letter = "f" + return report.outcome, letter, report.outcome.upper() + + +@attr.s +class WarningReport(object): + """ + Simple structure to hold warnings information captured by ``pytest_warning_captured``. + + :ivar str message: user friendly message about the warning + :ivar str|None nodeid: node id that generated the warning (see ``get_location``). + :ivar tuple|py.path.local fslocation: + file system location of the source of the warning (see ``get_location``). + """ + + message = attr.ib() + nodeid = attr.ib(default=None) + fslocation = attr.ib(default=None) + count_towards_summary = True + + def get_location(self, config): + """ + Returns the more user-friendly information about the location + of a warning, or None. 
+ """ + if self.nodeid: + return self.nodeid + if self.fslocation: + if isinstance(self.fslocation, tuple) and len(self.fslocation) >= 2: + filename, linenum = self.fslocation[:2] + relpath = py.path.local(filename).relto(config.invocation_dir) + if not relpath: + relpath = str(filename) + return "%s:%s" % (relpath, linenum) + else: + return str(self.fslocation) + return None + + +class TerminalReporter(object): + def __init__(self, config, file=None): + import _pytest.config + + self.config = config + self._numcollected = 0 + self._session = None + self._showfspath = None + + self.stats = {} + self.startdir = config.invocation_dir + if file is None: + file = sys.stdout + self._tw = _pytest.config.create_terminal_writer(config, file) + # self.writer will be deprecated in pytest-3.4 + self.writer = self._tw + self._screen_width = self._tw.fullwidth + self.currentfspath = None + self.reportchars = getreportopt(config) + self.hasmarkup = self._tw.hasmarkup + self.isatty = file.isatty() + self._progress_nodeids_reported = set() + self._show_progress_info = self._determine_show_progress_info() + self._collect_report_last_write = None + + def _determine_show_progress_info(self): + """Return True if we should display progress information based on the current config""" + # do not show progress if we are not capturing output (#3038) + if self.config.getoption("capture", "no") == "no": + return False + # do not show progress if we are showing fixture setup/teardown + if self.config.getoption("setupshow", False): + return False + cfg = self.config.getini("console_output_style") + if cfg in ("progress", "count"): + return cfg + return False + + @property + def verbosity(self): + return self.config.option.verbose + + @property + def showheader(self): + return self.verbosity >= 0 + + @property + def showfspath(self): + if self._showfspath is None: + return self.verbosity >= 0 + return self._showfspath + + @showfspath.setter + def showfspath(self, value): + self._showfspath = value + + @property + def showlongtestinfo(self): + return self.verbosity > 0 + + def hasopt(self, char): + char = {"xfailed": "x", "skipped": "s"}.get(char, char) + return char in self.reportchars + + def write_fspath_result(self, nodeid, res, **markup): + fspath = self.config.rootdir.join(nodeid.split("::")[0]) + # NOTE: explicitly check for None to work around py bug, and for less + # overhead in general (https://github.com/pytest-dev/py/pull/207). + if self.currentfspath is None or fspath != self.currentfspath: + if self.currentfspath is not None and self._show_progress_info: + self._write_progress_information_filling_space() + self.currentfspath = fspath + fspath = self.startdir.bestrelpath(fspath) + self._tw.line() + self._tw.write(fspath + " ") + self._tw.write(res, **markup) + + def write_ensure_prefix(self, prefix, extra="", **kwargs): + if self.currentfspath != prefix: + self._tw.line() + self.currentfspath = prefix + self._tw.write(prefix) + if extra: + self._tw.write(extra, **kwargs) + self.currentfspath = -2 + + def ensure_newline(self): + if self.currentfspath: + self._tw.line() + self.currentfspath = None + + def write(self, content, **markup): + self._tw.write(content, **markup) + + def write_line(self, line, **markup): + if not isinstance(line, six.text_type): + line = six.text_type(line, errors="replace") + self.ensure_newline() + self._tw.line(line, **markup) + + def rewrite(self, line, **markup): + """ + Rewinds the terminal cursor to the beginning and writes the given line. 
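+        (used, for example, by ``report_collect`` to update the "collecting ..." status line in place)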
+ + :kwarg erase: if True, will also add spaces until the full terminal width to ensure + previous lines are properly erased. + + The rest of the keyword arguments are markup instructions. + """ + erase = markup.pop("erase", False) + if erase: + fill_count = self._tw.fullwidth - len(line) - 1 + fill = " " * fill_count + else: + fill = "" + line = str(line) + self._tw.write("\r" + line + fill, **markup) + + def write_sep(self, sep, title=None, **markup): + self.ensure_newline() + self._tw.sep(sep, title, **markup) + + def section(self, title, sep="=", **kw): + self._tw.sep(sep, title, **kw) + + def line(self, msg, **kw): + self._tw.line(msg, **kw) + + def pytest_internalerror(self, excrepr): + for line in six.text_type(excrepr).split("\n"): + self.write_line("INTERNALERROR> " + line) + return 1 + + def pytest_warning_captured(self, warning_message, item): + # from _pytest.nodes import get_fslocation_from_item + from _pytest.warnings import warning_record_to_str + + warnings = self.stats.setdefault("warnings", []) + fslocation = warning_message.filename, warning_message.lineno + message = warning_record_to_str(warning_message) + + nodeid = item.nodeid if item is not None else "" + warning_report = WarningReport( + fslocation=fslocation, message=message, nodeid=nodeid + ) + warnings.append(warning_report) + + def pytest_plugin_registered(self, plugin): + if self.config.option.traceconfig: + msg = "PLUGIN registered: %s" % (plugin,) + # XXX this event may happen during setup/teardown time + # which unfortunately captures our output here + # which garbles our output if we use self.write_line + self.write_line(msg) + + def pytest_deselected(self, items): + self.stats.setdefault("deselected", []).extend(items) + + def pytest_runtest_logstart(self, nodeid, location): + # ensure that the path is printed before the + # 1st test of a module starts running + if self.showlongtestinfo: + line = self._locationline(nodeid, *location) + self.write_ensure_prefix(line, "") + elif self.showfspath: + fsid = nodeid.split("::")[0] + self.write_fspath_result(fsid, "") + + def pytest_runtest_logreport(self, report): + self._tests_ran = True + rep = report + res = self.config.hook.pytest_report_teststatus(report=rep, config=self.config) + category, letter, word = res + if isinstance(word, tuple): + word, markup = word + else: + markup = None + self.stats.setdefault(category, []).append(rep) + if not letter and not word: + # probably passed setup/teardown + return + running_xdist = hasattr(rep, "node") + if markup is None: + was_xfail = hasattr(report, "wasxfail") + if rep.passed and not was_xfail: + markup = {"green": True} + elif rep.passed and was_xfail: + markup = {"yellow": True} + elif rep.failed: + markup = {"red": True} + elif rep.skipped: + markup = {"yellow": True} + else: + markup = {} + if self.verbosity <= 0: + if not running_xdist and self.showfspath: + self.write_fspath_result(rep.nodeid, letter, **markup) + else: + self._tw.write(letter, **markup) + else: + self._progress_nodeids_reported.add(rep.nodeid) + line = self._locationline(rep.nodeid, *rep.location) + if not running_xdist: + self.write_ensure_prefix(line, word, **markup) + if self._show_progress_info: + self._write_progress_information_filling_space() + else: + self.ensure_newline() + self._tw.write("[%s]" % rep.node.gateway.id) + if self._show_progress_info: + self._tw.write( + self._get_progress_information_message() + " ", cyan=True + ) + else: + self._tw.write(" ") + self._tw.write(word, **markup) + self._tw.write(" " + line) + 
self.currentfspath = -2 + + def pytest_runtest_logfinish(self, nodeid): + if self.verbosity <= 0 and self._show_progress_info: + if self._show_progress_info == "count": + num_tests = self._session.testscollected + progress_length = len(" [{}/{}]".format(str(num_tests), str(num_tests))) + else: + progress_length = len(" [100%]") + + self._progress_nodeids_reported.add(nodeid) + is_last_item = ( + len(self._progress_nodeids_reported) == self._session.testscollected + ) + if is_last_item: + self._write_progress_information_filling_space() + else: + w = self._width_of_current_line + past_edge = w + progress_length + 1 >= self._screen_width + if past_edge: + msg = self._get_progress_information_message() + self._tw.write(msg + "\n", cyan=True) + + def _get_progress_information_message(self): + collected = self._session.testscollected + if self._show_progress_info == "count": + if collected: + progress = self._progress_nodeids_reported + counter_format = "{{:{}d}}".format(len(str(collected))) + format_string = " [{}/{{}}]".format(counter_format) + return format_string.format(len(progress), collected) + return " [ {} / {} ]".format(collected, collected) + else: + if collected: + progress = len(self._progress_nodeids_reported) * 100 // collected + return " [{:3d}%]".format(progress) + return " [100%]" + + def _write_progress_information_filling_space(self): + msg = self._get_progress_information_message() + w = self._width_of_current_line + fill = self._tw.fullwidth - w - 1 + self.write(msg.rjust(fill), cyan=True) + + @property + def _width_of_current_line(self): + """Return the width of current line, using the superior implementation of py-1.6 when available""" + try: + return self._tw.width_of_current_line + except AttributeError: + # py < 1.6.0 + return self._tw.chars_on_current_line + + def pytest_collection(self): + if self.isatty: + if self.config.option.verbose >= 0: + self.write("collecting ... ", bold=True) + self._collect_report_last_write = time.time() + elif self.config.option.verbose >= 1: + self.write("collecting ... ", bold=True) + + def pytest_collectreport(self, report): + if report.failed: + self.stats.setdefault("error", []).append(report) + elif report.skipped: + self.stats.setdefault("skipped", []).append(report) + items = [x for x in report.result if isinstance(x, pytest.Item)] + self._numcollected += len(items) + if self.isatty: + self.report_collect() + + def report_collect(self, final=False): + if self.config.option.verbose < 0: + return + + if not final: + # Only write "collecting" report every 0.5s. 
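+            # REPORT_COLLECTING_RESOLUTION (0.5s) keeps these rewrites from flooding the terminal while large suites are collected.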
+ t = time.time() + if ( + self._collect_report_last_write is not None + and self._collect_report_last_write > t - REPORT_COLLECTING_RESOLUTION + ): + return + self._collect_report_last_write = t + + errors = len(self.stats.get("error", [])) + skipped = len(self.stats.get("skipped", [])) + deselected = len(self.stats.get("deselected", [])) + selected = self._numcollected - errors - skipped - deselected + if final: + line = "collected " + else: + line = "collecting " + line += ( + str(self._numcollected) + " item" + ("" if self._numcollected == 1 else "s") + ) + if errors: + line += " / %d errors" % errors + if deselected: + line += " / %d deselected" % deselected + if skipped: + line += " / %d skipped" % skipped + if self._numcollected > selected > 0: + line += " / %d selected" % selected + if self.isatty: + self.rewrite(line, bold=True, erase=True) + if final: + self.write("\n") + else: + self.write_line(line) + + @pytest.hookimpl(trylast=True) + def pytest_sessionstart(self, session): + self._session = session + self._sessionstarttime = time.time() + if not self.showheader: + return + self.write_sep("=", "test session starts", bold=True) + verinfo = platform.python_version() + msg = "platform %s -- Python %s" % (sys.platform, verinfo) + if hasattr(sys, "pypy_version_info"): + verinfo = ".".join(map(str, sys.pypy_version_info[:3])) + msg += "[pypy-%s-%s]" % (verinfo, sys.pypy_version_info[3]) + msg += ", pytest-%s, py-%s, pluggy-%s" % ( + pytest.__version__, + py.__version__, + pluggy.__version__, + ) + if ( + self.verbosity > 0 + or self.config.option.debug + or getattr(self.config.option, "pastebin", None) + ): + msg += " -- " + str(sys.executable) + self.write_line(msg) + lines = self.config.hook.pytest_report_header( + config=self.config, startdir=self.startdir + ) + self._write_report_lines_from_hooks(lines) + + def _write_report_lines_from_hooks(self, lines): + lines.reverse() + for line in collapse(lines): + self.write_line(line) + + def pytest_report_header(self, config): + line = "rootdir: %s" % config.rootdir + + if config.inifile: + line += ", inifile: " + config.rootdir.bestrelpath(config.inifile) + + testpaths = config.getini("testpaths") + if testpaths and config.args == testpaths: + rel_paths = [config.rootdir.bestrelpath(x) for x in testpaths] + line += ", testpaths: {}".format(", ".join(rel_paths)) + result = [line] + + plugininfo = config.pluginmanager.list_plugin_distinfo() + if plugininfo: + result.append("plugins: %s" % ", ".join(_plugin_nameversions(plugininfo))) + return result + + def pytest_collection_finish(self, session): + self.report_collect(True) + + if self.config.getoption("collectonly"): + self._printcollecteditems(session.items) + + lines = self.config.hook.pytest_report_collectionfinish( + config=self.config, startdir=self.startdir, items=session.items + ) + self._write_report_lines_from_hooks(lines) + + if self.config.getoption("collectonly"): + if self.stats.get("failed"): + self._tw.sep("!", "collection failures") + for rep in self.stats.get("failed"): + rep.toterminal(self._tw) + + def _printcollecteditems(self, items): + # to print out items and their parent collectors + # we take care to leave out Instances aka () + # because later versions are going to get rid of them anyway + if self.config.option.verbose < 0: + if self.config.option.verbose < -1: + counts = {} + for item in items: + name = item.nodeid.split("::", 1)[0] + counts[name] = counts.get(name, 0) + 1 + for name, count in sorted(counts.items()): + self._tw.line("%s: %d" % (name, count)) 
+ else: + for item in items: + self._tw.line(item.nodeid) + return + stack = [] + indent = "" + for item in items: + needed_collectors = item.listchain()[1:] # strip root node + while stack: + if stack == needed_collectors[: len(stack)]: + break + stack.pop() + for col in needed_collectors[len(stack) :]: + stack.append(col) + if col.name == "()": # Skip Instances. + continue + indent = (len(stack) - 1) * " " + self._tw.line("%s%s" % (indent, col)) + if self.config.option.verbose >= 1: + if hasattr(col, "_obj") and col._obj.__doc__: + for line in col._obj.__doc__.strip().splitlines(): + self._tw.line("%s%s" % (indent + " ", line.strip())) + + @pytest.hookimpl(hookwrapper=True) + def pytest_sessionfinish(self, exitstatus): + outcome = yield + outcome.get_result() + self._tw.line("") + summary_exit_codes = ( + EXIT_OK, + EXIT_TESTSFAILED, + EXIT_INTERRUPTED, + EXIT_USAGEERROR, + EXIT_NOTESTSCOLLECTED, + ) + if exitstatus in summary_exit_codes: + self.config.hook.pytest_terminal_summary( + terminalreporter=self, exitstatus=exitstatus, config=self.config + ) + if exitstatus == EXIT_INTERRUPTED: + self._report_keyboardinterrupt() + del self._keyboardinterrupt_memo + self.summary_stats() + + @pytest.hookimpl(hookwrapper=True) + def pytest_terminal_summary(self): + self.summary_errors() + self.summary_failures() + self.summary_warnings() + self.summary_passes() + yield + self.short_test_summary() + # Display any extra warnings from teardown here (if any). + self.summary_warnings() + + def pytest_keyboard_interrupt(self, excinfo): + self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True) + + def pytest_unconfigure(self): + if hasattr(self, "_keyboardinterrupt_memo"): + self._report_keyboardinterrupt() + + def _report_keyboardinterrupt(self): + excrepr = self._keyboardinterrupt_memo + msg = excrepr.reprcrash.message + self.write_sep("!", msg) + if "KeyboardInterrupt" in msg: + if self.config.option.fulltrace: + excrepr.toterminal(self._tw) + else: + excrepr.reprcrash.toterminal(self._tw) + self._tw.line( + "(to show a full traceback on KeyboardInterrupt use --fulltrace)", + yellow=True, + ) + + def _locationline(self, nodeid, fspath, lineno, domain): + def mkrel(nodeid): + line = self.config.cwd_relative_nodeid(nodeid) + if domain and line.endswith(domain): + line = line[: -len(domain)] + values = domain.split("[") + values[0] = values[0].replace(".", "::") # don't replace '.' in params + line += "[".join(values) + return line + + # collect_fspath comes from testid which has a "/"-normalized path + + if fspath: + res = mkrel(nodeid) + if self.verbosity >= 2 and nodeid.split("::")[0] != fspath.replace( + "\\", nodes.SEP + ): + res += " <- " + self.startdir.bestrelpath(fspath) + else: + res = "[location]" + return res + " " + + def _getfailureheadline(self, rep): + head_line = rep.head_line + if head_line: + return head_line + return "test session" # XXX? 
+ + def _getcrashline(self, rep): + try: + return str(rep.longrepr.reprcrash) + except AttributeError: + try: + return str(rep.longrepr)[:50] + except AttributeError: + return "" + + # + # summaries for sessionfinish + # + def getreports(self, name): + values = [] + for x in self.stats.get(name, []): + if not hasattr(x, "_pdbshown"): + values.append(x) + return values + + def summary_warnings(self): + if self.hasopt("w"): + all_warnings = self.stats.get("warnings") + if not all_warnings: + return + + final = hasattr(self, "_already_displayed_warnings") + if final: + warning_reports = all_warnings[self._already_displayed_warnings :] + else: + warning_reports = all_warnings + self._already_displayed_warnings = len(warning_reports) + if not warning_reports: + return + + reports_grouped_by_message = collections.OrderedDict() + for wr in warning_reports: + reports_grouped_by_message.setdefault(wr.message, []).append(wr) + + title = "warnings summary (final)" if final else "warnings summary" + self.write_sep("=", title, yellow=True, bold=False) + for message, warning_reports in reports_grouped_by_message.items(): + has_any_location = False + for w in warning_reports: + location = w.get_location(self.config) + if location: + self._tw.line(str(location)) + has_any_location = True + if has_any_location: + lines = message.splitlines() + indented = "\n".join(" " + x for x in lines) + message = indented.rstrip() + else: + message = message.rstrip() + self._tw.line(message) + self._tw.line() + self._tw.line("-- Docs: https://docs.pytest.org/en/latest/warnings.html") + + def summary_passes(self): + if self.config.option.tbstyle != "no": + if self.hasopt("P"): + reports = self.getreports("passed") + if not reports: + return + self.write_sep("=", "PASSES") + for rep in reports: + if rep.sections: + msg = self._getfailureheadline(rep) + self.write_sep("_", msg, green=True, bold=True) + self._outrep_summary(rep) + + def print_teardown_sections(self, rep): + showcapture = self.config.option.showcapture + if showcapture == "no": + return + for secname, content in rep.sections: + if showcapture != "all" and showcapture not in secname: + continue + if "teardown" in secname: + self._tw.sep("-", secname) + if content[-1:] == "\n": + content = content[:-1] + self._tw.line(content) + + def summary_failures(self): + if self.config.option.tbstyle != "no": + reports = self.getreports("failed") + if not reports: + return + self.write_sep("=", "FAILURES") + if self.config.option.tbstyle == "line": + for rep in reports: + line = self._getcrashline(rep) + self.write_line(line) + else: + teardown_sections = {} + for report in self.getreports(""): + if report.when == "teardown": + teardown_sections.setdefault(report.nodeid, []).append(report) + + for rep in reports: + msg = self._getfailureheadline(rep) + self.write_sep("_", msg, red=True, bold=True) + self._outrep_summary(rep) + for report in teardown_sections.get(rep.nodeid, []): + self.print_teardown_sections(report) + + def summary_errors(self): + if self.config.option.tbstyle != "no": + reports = self.getreports("error") + if not reports: + return + self.write_sep("=", "ERRORS") + for rep in self.stats["error"]: + msg = self._getfailureheadline(rep) + if rep.when == "collect": + msg = "ERROR collecting " + msg + else: + msg = "ERROR at %s of %s" % (rep.when, msg) + self.write_sep("_", msg, red=True, bold=True) + self._outrep_summary(rep) + + def _outrep_summary(self, rep): + rep.toterminal(self._tw) + showcapture = self.config.option.showcapture + if showcapture == 
"no": + return + for secname, content in rep.sections: + if showcapture != "all" and showcapture not in secname: + continue + self._tw.sep("-", secname) + if content[-1:] == "\n": + content = content[:-1] + self._tw.line(content) + + def summary_stats(self): + session_duration = time.time() - self._sessionstarttime + (line, color) = build_summary_stats_line(self.stats) + msg = "%s in %.2f seconds" % (line, session_duration) + markup = {color: True, "bold": True} + + if self.verbosity >= 0: + self.write_sep("=", msg, **markup) + if self.verbosity == -1: + self.write_line(msg, **markup) + + def short_test_summary(self): + if not self.reportchars: + return + + def show_simple(stat, lines): + failed = self.stats.get(stat, []) + if not failed: + return + termwidth = self.writer.fullwidth + config = self.config + for rep in failed: + line = _get_line_with_reprcrash_message(config, rep, termwidth) + lines.append(line) + + def show_xfailed(lines): + xfailed = self.stats.get("xfailed", []) + for rep in xfailed: + verbose_word = rep._get_verbose_word(self.config) + pos = _get_pos(self.config, rep) + lines.append("%s %s" % (verbose_word, pos)) + reason = rep.wasxfail + if reason: + lines.append(" " + str(reason)) + + def show_xpassed(lines): + xpassed = self.stats.get("xpassed", []) + for rep in xpassed: + verbose_word = rep._get_verbose_word(self.config) + pos = _get_pos(self.config, rep) + reason = rep.wasxfail + lines.append("%s %s %s" % (verbose_word, pos, reason)) + + def show_skipped(lines): + skipped = self.stats.get("skipped", []) + fskips = _folded_skips(skipped) if skipped else [] + if not fskips: + return + verbose_word = skipped[0]._get_verbose_word(self.config) + for num, fspath, lineno, reason in fskips: + if reason.startswith("Skipped: "): + reason = reason[9:] + if lineno is not None: + lines.append( + "%s [%d] %s:%d: %s" + % (verbose_word, num, fspath, lineno + 1, reason) + ) + else: + lines.append("%s [%d] %s: %s" % (verbose_word, num, fspath, reason)) + + REPORTCHAR_ACTIONS = { + "x": show_xfailed, + "X": show_xpassed, + "f": partial(show_simple, "failed"), + "F": partial(show_simple, "failed"), + "s": show_skipped, + "S": show_skipped, + "p": partial(show_simple, "passed"), + "E": partial(show_simple, "error"), + } + + lines = [] + for char in self.reportchars: + action = REPORTCHAR_ACTIONS.get(char) + if action: # skipping e.g. "P" (passed with output) here. + action(lines) + + if lines: + self.write_sep("=", "short test summary info") + for line in lines: + self.write_line(line) + + +def _get_pos(config, rep): + nodeid = config.cwd_relative_nodeid(rep.nodeid) + return nodeid + + +def _get_line_with_reprcrash_message(config, rep, termwidth): + """Get summary line for a report, trying to add reprcrash message.""" + from wcwidth import wcswidth + + verbose_word = rep._get_verbose_word(config) + pos = _get_pos(config, rep) + + line = "%s %s" % (verbose_word, pos) + len_line = wcswidth(line) + ellipsis, len_ellipsis = "...", 3 + if len_line > termwidth - len_ellipsis: + # No space for an additional message. + return line + + try: + msg = rep.longrepr.reprcrash.message + except AttributeError: + pass + else: + # Only use the first line. 
+ i = msg.find("\n") + if i != -1: + msg = msg[:i] + len_msg = wcswidth(msg) + + sep, len_sep = " - ", 3 + max_len_msg = termwidth - len_line - len_sep + if max_len_msg >= len_ellipsis: + if len_msg > max_len_msg: + max_len_msg -= len_ellipsis + msg = msg[:max_len_msg] + while wcswidth(msg) > max_len_msg: + msg = msg[:-1] + if six.PY2: + # on python 2 systems with narrow unicode compilation, trying to + # get a single character out of a multi-byte unicode character such as + # u'😄' will result in a High Surrogate (U+D83D) character, which is + # rendered as u'�'; in this case we just strip that character out as it + # serves no purpose being rendered + try: + surrogate = six.unichr(0xD83D) + msg = msg.rstrip(surrogate) + except ValueError: # pragma: no cover + # Jython cannot represent this lone surrogate at all (#5256): + # ValueError: unichr() arg is a lone surrogate in range + # (0xD800, 0xDFFF) (Jython UTF-16 encoding) + # ignore this case as it shouldn't appear in the string anyway + pass + msg += ellipsis + line += sep + msg + return line + + +def _folded_skips(skipped): + d = {} + for event in skipped: + key = event.longrepr + assert len(key) == 3, (event, key) + keywords = getattr(event, "keywords", {}) + # folding reports with global pytestmark variable + # this is workaround, because for now we cannot identify the scope of a skip marker + # TODO: revisit after marks scope would be fixed + if ( + event.when == "setup" + and "skip" in keywords + and "pytestmark" not in keywords + ): + key = (key[0], None, key[2]) + d.setdefault(key, []).append(event) + values = [] + for key, events in d.items(): + values.append((len(events),) + key) + return values + + +def build_summary_stats_line(stats): + known_types = ( + "failed passed skipped deselected xfailed xpassed warnings error".split() + ) + unknown_type_seen = False + for found_type in stats: + if found_type not in known_types: + if found_type: # setup/teardown reports have an empty key, ignore them + known_types.append(found_type) + unknown_type_seen = True + parts = [] + for key in known_types: + reports = stats.get(key, None) + if reports: + count = sum( + 1 for rep in reports if getattr(rep, "count_towards_summary", True) + ) + parts.append("%d %s" % (count, key)) + + if parts: + line = ", ".join(parts) + else: + line = "no tests ran" + + if "failed" in stats or "error" in stats: + color = "red" + elif "warnings" in stats or unknown_type_seen: + color = "yellow" + elif "passed" in stats: + color = "green" + else: + color = "yellow" + + return line, color + + +def _plugin_nameversions(plugininfo): + values = [] + for plugin, dist in plugininfo: + # gets us name and version! + name = "{dist.project_name}-{dist.version}".format(dist=dist) + # questionable convenience, but it keeps things short + if name.startswith("pytest-"): + name = name[7:] + # we decided to print python package names + # they can have more than one plugin + if name not in values: + values.append(name) + return values diff --git a/venv/lib/python2.7/site-packages/_pytest/tmpdir.py b/venv/lib/python2.7/site-packages/_pytest/tmpdir.py new file mode 100644 index 0000000..a8a7037 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/tmpdir.py @@ -0,0 +1,197 @@ +# -*- coding: utf-8 -*- +""" support for providing temporary directories to test functions. 
""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import re +import tempfile +import warnings + +import attr +import py +import six + +import pytest +from .pathlib import ensure_reset_dir +from .pathlib import LOCK_TIMEOUT +from .pathlib import make_numbered_dir +from .pathlib import make_numbered_dir_with_cleanup +from .pathlib import Path +from _pytest.monkeypatch import MonkeyPatch + + +@attr.s +class TempPathFactory(object): + """Factory for temporary directories under the common base temp directory. + + The base directory can be configured using the ``--basetemp`` option.""" + + _given_basetemp = attr.ib( + # using os.path.abspath() to get absolute path instead of resolve() as it + # does not work the same in all platforms (see #4427) + # Path.absolute() exists, but it is not public (see https://bugs.python.org/issue25012) + converter=attr.converters.optional( + lambda p: Path(os.path.abspath(six.text_type(p))) + ) + ) + _trace = attr.ib() + _basetemp = attr.ib(default=None) + + @classmethod + def from_config(cls, config): + """ + :param config: a pytest configuration + """ + return cls( + given_basetemp=config.option.basetemp, trace=config.trace.get("tmpdir") + ) + + def mktemp(self, basename, numbered=True): + """makes a temporary directory managed by the factory""" + if not numbered: + p = self.getbasetemp().joinpath(basename) + p.mkdir() + else: + p = make_numbered_dir(root=self.getbasetemp(), prefix=basename) + self._trace("mktemp", p) + return p + + def getbasetemp(self): + """ return base temporary directory. """ + if self._basetemp is not None: + return self._basetemp + + if self._given_basetemp is not None: + basetemp = self._given_basetemp + ensure_reset_dir(basetemp) + basetemp = basetemp.resolve() + else: + from_env = os.environ.get("PYTEST_DEBUG_TEMPROOT") + temproot = Path(from_env or tempfile.gettempdir()).resolve() + user = get_user() or "unknown" + # use a sub-directory in the temproot to speed-up + # make_numbered_dir() call + rootdir = temproot.joinpath("pytest-of-{}".format(user)) + rootdir.mkdir(exist_ok=True) + basetemp = make_numbered_dir_with_cleanup( + prefix="pytest-", root=rootdir, keep=3, lock_timeout=LOCK_TIMEOUT + ) + assert basetemp is not None, basetemp + self._basetemp = t = basetemp + self._trace("new basetemp", t) + return t + + +@attr.s +class TempdirFactory(object): + """ + backward comptibility wrapper that implements + :class:``py.path.local`` for :class:``TempPathFactory`` + """ + + _tmppath_factory = attr.ib() + + def ensuretemp(self, string, dir=1): + """ (deprecated) return temporary directory path with + the given string as the trailing part. It is usually + better to use the 'tmpdir' function argument which + provides an empty unique-per-test-invocation directory + and is guaranteed to be empty. + """ + # py.log._apiwarn(">1.1", "use tmpdir function argument") + from .deprecated import PYTEST_ENSURETEMP + + warnings.warn(PYTEST_ENSURETEMP, stacklevel=2) + return self.getbasetemp().ensure(string, dir=dir) + + def mktemp(self, basename, numbered=True): + """Create a subdirectory of the base temporary directory and return it. + If ``numbered``, ensure the directory is unique by adding a number + prefix greater than any existing one. 
+ """ + return py.path.local(self._tmppath_factory.mktemp(basename, numbered).resolve()) + + def getbasetemp(self): + """backward compat wrapper for ``_tmppath_factory.getbasetemp``""" + return py.path.local(self._tmppath_factory.getbasetemp().resolve()) + + +def get_user(): + """Return the current user name, or None if getuser() does not work + in the current environment (see #1010). + """ + import getpass + + try: + return getpass.getuser() + except (ImportError, KeyError): + return None + + +def pytest_configure(config): + """Create a TempdirFactory and attach it to the config object. + + This is to comply with existing plugins which expect the handler to be + available at pytest_configure time, but ideally should be moved entirely + to the tmpdir_factory session fixture. + """ + mp = MonkeyPatch() + tmppath_handler = TempPathFactory.from_config(config) + t = TempdirFactory(tmppath_handler) + config._cleanup.append(mp.undo) + mp.setattr(config, "_tmp_path_factory", tmppath_handler, raising=False) + mp.setattr(config, "_tmpdirhandler", t, raising=False) + mp.setattr(pytest, "ensuretemp", t.ensuretemp, raising=False) + + +@pytest.fixture(scope="session") +def tmpdir_factory(request): + """Return a :class:`_pytest.tmpdir.TempdirFactory` instance for the test session. + """ + return request.config._tmpdirhandler + + +@pytest.fixture(scope="session") +def tmp_path_factory(request): + """Return a :class:`_pytest.tmpdir.TempPathFactory` instance for the test session. + """ + return request.config._tmp_path_factory + + +def _mk_tmp(request, factory): + name = request.node.name + name = re.sub(r"[\W]", "_", name) + MAXVAL = 30 + name = name[:MAXVAL] + return factory.mktemp(name, numbered=True) + + +@pytest.fixture +def tmpdir(tmp_path): + """Return a temporary directory path object + which is unique to each test function invocation, + created as a sub directory of the base temporary + directory. The returned object is a `py.path.local`_ + path object. + + .. _`py.path.local`: https://py.readthedocs.io/en/latest/path.html + """ + return py.path.local(tmp_path) + + +@pytest.fixture +def tmp_path(request, tmp_path_factory): + """Return a temporary directory path object + which is unique to each test function invocation, + created as a sub directory of the base temporary + directory. The returned object is a :class:`pathlib.Path` + object. + + .. note:: + + in python < 3.6 this is a pathlib2.Path + """ + + return _mk_tmp(request, tmp_path_factory) diff --git a/venv/lib/python2.7/site-packages/_pytest/unittest.py b/venv/lib/python2.7/site-packages/_pytest/unittest.py new file mode 100644 index 0000000..3ff6f45 --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/unittest.py @@ -0,0 +1,291 @@ +# -*- coding: utf-8 -*- +""" discovery and running of std-library "unittest" style tests. """ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import sys +import traceback + +import _pytest._code +import pytest +from _pytest.compat import getimfunc +from _pytest.config import hookimpl +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.outcomes import xfail +from _pytest.python import Class +from _pytest.python import Function + + +def pytest_pycollect_makeitem(collector, name, obj): + # has unittest been imported and is obj a subclass of its TestCase? 
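+    # issubclass() can itself raise (obj may not be a class at all), hence the broad except below.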
+    try:
+        if not issubclass(obj, sys.modules["unittest"].TestCase):
+            return
+    except Exception:
+        return
+    # yes, so let's collect it
+    return UnitTestCase(name, parent=collector)
+
+
+class UnitTestCase(Class):
+    # marker for fixturemanager.getfixtureinfo()
+    # to declare that our children do not support funcargs
+    nofuncargs = True
+
+    def collect(self):
+        from unittest import TestLoader
+
+        cls = self.obj
+        if not getattr(cls, "__test__", True):
+            return
+
+        skipped = getattr(cls, "__unittest_skip__", False)
+        if not skipped:
+            self._inject_setup_teardown_fixtures(cls)
+            self._inject_setup_class_fixture()
+
+        self.session._fixturemanager.parsefactories(self, unittest=True)
+        loader = TestLoader()
+        foundsomething = False
+        for name in loader.getTestCaseNames(self.obj):
+            x = getattr(self.obj, name)
+            if not getattr(x, "__test__", True):
+                continue
+            funcobj = getimfunc(x)
+            yield TestCaseFunction(name, parent=self, callobj=funcobj)
+            foundsomething = True
+
+        if not foundsomething:
+            runtest = getattr(self.obj, "runTest", None)
+            if runtest is not None:
+                ut = sys.modules.get("twisted.trial.unittest", None)
+                if ut is None or runtest != ut.TestCase.runTest:
+                    yield TestCaseFunction("runTest", parent=self)
+
+    def _inject_setup_teardown_fixtures(self, cls):
+        """Injects a hidden auto-use fixture to invoke setUpClass/setup_method and corresponding
+        teardown functions (#517)"""
+        class_fixture = _make_xunit_fixture(
+            cls, "setUpClass", "tearDownClass", scope="class", pass_self=False
+        )
+        if class_fixture:
+            cls.__pytest_class_setup = class_fixture
+
+        method_fixture = _make_xunit_fixture(
+            cls, "setup_method", "teardown_method", scope="function", pass_self=True
+        )
+        if method_fixture:
+            cls.__pytest_method_setup = method_fixture
+
+
+def _make_xunit_fixture(obj, setup_name, teardown_name, scope, pass_self):
+    setup = getattr(obj, setup_name, None)
+    teardown = getattr(obj, teardown_name, None)
+    if setup is None and teardown is None:
+        return None
+
+    @pytest.fixture(scope=scope, autouse=True)
+    def fixture(self, request):
+        if getattr(self, "__unittest_skip__", None):
+            reason = self.__unittest_skip_why__
+            pytest.skip(reason)
+        if setup is not None:
+            if pass_self:
+                setup(self, request.function)
+            else:
+                setup()
+        yield
+        if teardown is not None:
+            if pass_self:
+                teardown(self, request.function)
+            else:
+                teardown()
+
+    return fixture
+
+
+class TestCaseFunction(Function):
+    nofuncargs = True
+    _excinfo = None
+    _testcase = None
+
+    def setup(self):
+        self._testcase = self.parent.obj(self.name)
+        self._fix_unittest_skip_decorator()
+        self._obj = getattr(self._testcase, self.name)
+        if hasattr(self, "_request"):
+            self._request._fillfixtures()
+
+    def _fix_unittest_skip_decorator(self):
+        """
+        The @unittest.skip decorator calls functools.wraps(self._testcase)
+        The call to functools.wraps() fails unless self._testcase
+        has a __name__ attribute. This is usually automatically supplied
+        if the test is a function or method, but we need to add it manually
+        here.
+
+        See issue #1169
+        """
+        if sys.version_info[0] == 2:
+            setattr(self._testcase, "__name__", self.name)
+
+    def teardown(self):
+        self._testcase = None
+        self._obj = None
+
+    def startTest(self, testcase):
+        pass
+
+    def _addexcinfo(self, rawexcinfo):
+        # unwrap potential exception info (see twisted trial support below)
+        rawexcinfo = getattr(rawexcinfo, "_rawexcinfo", rawexcinfo)
+        try:
+            excinfo = _pytest._code.ExceptionInfo(rawexcinfo)
+            # invoke the attributes to trigger storing the traceback
+            # trial causes some issue there
+            excinfo.value
+            excinfo.traceback
+        except TypeError:
+            try:
+                try:
+                    values = traceback.format_exception(*rawexcinfo)
+                    values.insert(
+                        0,
+                        "NOTE: Incompatible Exception Representation, "
+                        "displaying natively:\n\n",
+                    )
+                    fail("".join(values), pytrace=False)
+                except (fail.Exception, KeyboardInterrupt):
+                    raise
+                except:  # noqa
+                    fail(
+                        "ERROR: Unknown Incompatible Exception "
+                        "representation:\n%r" % (rawexcinfo,),
+                        pytrace=False,
+                    )
+            except KeyboardInterrupt:
+                raise
+            except fail.Exception:
+                excinfo = _pytest._code.ExceptionInfo.from_current()
+        self.__dict__.setdefault("_excinfo", []).append(excinfo)
+
+    def addError(self, testcase, rawexcinfo):
+        self._addexcinfo(rawexcinfo)
+
+    def addFailure(self, testcase, rawexcinfo):
+        self._addexcinfo(rawexcinfo)
+
+    def addSkip(self, testcase, reason):
+        try:
+            skip(reason)
+        except skip.Exception:
+            self._skipped_by_mark = True
+            self._addexcinfo(sys.exc_info())
+
+    def addExpectedFailure(self, testcase, rawexcinfo, reason=""):
+        try:
+            xfail(str(reason))
+        except xfail.Exception:
+            self._addexcinfo(sys.exc_info())
+
+    def addUnexpectedSuccess(self, testcase, reason=""):
+        self._unexpectedsuccess = reason
+
+    def addSuccess(self, testcase):
+        pass
+
+    def stopTest(self, testcase):
+        pass
+
+    def _handle_skip(self):
+        # implements the skipping machinery (see #2137)
+        # analogous to Python's Lib/unittest/case.py:run
+        testMethod = getattr(self._testcase, self._testcase._testMethodName)
+        if getattr(self._testcase.__class__, "__unittest_skip__", False) or getattr(
+            testMethod, "__unittest_skip__", False
+        ):
+            # If the class or method was skipped.
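+            # The class-level skip reason takes precedence; fall back to the method-level one.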
+ skip_why = getattr( + self._testcase.__class__, "__unittest_skip_why__", "" + ) or getattr(testMethod, "__unittest_skip_why__", "") + try: # PY3, unittest2 on PY2 + self._testcase._addSkip(self, self._testcase, skip_why) + except TypeError: # PY2 + if sys.version_info[0] != 2: + raise + self._testcase._addSkip(self, skip_why) + return True + return False + + def runtest(self): + if self.config.pluginmanager.get_plugin("pdbinvoke") is None: + self._testcase(result=self) + else: + # disables tearDown and cleanups for post mortem debugging (see #1890) + if self._handle_skip(): + return + self._testcase.debug() + + def _prunetraceback(self, excinfo): + Function._prunetraceback(self, excinfo) + traceback = excinfo.traceback.filter( + lambda x: not x.frame.f_globals.get("__unittest") + ) + if traceback: + excinfo.traceback = traceback + + +@hookimpl(tryfirst=True) +def pytest_runtest_makereport(item, call): + if isinstance(item, TestCaseFunction): + if item._excinfo: + call.excinfo = item._excinfo.pop(0) + try: + del call.result + except AttributeError: + pass + + +# twisted trial support + + +@hookimpl(hookwrapper=True) +def pytest_runtest_protocol(item): + if isinstance(item, TestCaseFunction) and "twisted.trial.unittest" in sys.modules: + ut = sys.modules["twisted.python.failure"] + Failure__init__ = ut.Failure.__init__ + check_testcase_implements_trial_reporter() + + def excstore( + self, exc_value=None, exc_type=None, exc_tb=None, captureVars=None + ): + if exc_value is None: + self._rawexcinfo = sys.exc_info() + else: + if exc_type is None: + exc_type = type(exc_value) + self._rawexcinfo = (exc_type, exc_value, exc_tb) + try: + Failure__init__( + self, exc_value, exc_type, exc_tb, captureVars=captureVars + ) + except TypeError: + Failure__init__(self, exc_value, exc_type, exc_tb) + + ut.Failure.__init__ = excstore + yield + ut.Failure.__init__ = Failure__init__ + else: + yield + + +def check_testcase_implements_trial_reporter(done=[]): + if done: + return + from zope.interface import classImplements + from twisted.trial.itrial import IReporter + + classImplements(TestCaseFunction, IReporter) + done.append(1) diff --git a/venv/lib/python2.7/site-packages/_pytest/warning_types.py b/venv/lib/python2.7/site-packages/_pytest/warning_types.py new file mode 100644 index 0000000..861010a --- /dev/null +++ b/venv/lib/python2.7/site-packages/_pytest/warning_types.py @@ -0,0 +1,112 @@ +# -*- coding: utf-8 -*- +import attr + + +class PytestWarning(UserWarning): + """ + Bases: :class:`UserWarning`. + + Base class for all warnings emitted by pytest. + """ + + +class PytestAssertRewriteWarning(PytestWarning): + """ + Bases: :class:`PytestWarning`. + + Warning emitted by the pytest assert rewrite module. + """ + + +class PytestCacheWarning(PytestWarning): + """ + Bases: :class:`PytestWarning`. + + Warning emitted by the cache plugin in various situations. + """ + + +class PytestConfigWarning(PytestWarning): + """ + Bases: :class:`PytestWarning`. + + Warning emitted for configuration issues. + """ + + +class PytestCollectionWarning(PytestWarning): + """ + Bases: :class:`PytestWarning`. + + Warning emitted when pytest is not able to collect a file or symbol in a module. + """ + + +class PytestDeprecationWarning(PytestWarning, DeprecationWarning): + """ + Bases: :class:`pytest.PytestWarning`, :class:`DeprecationWarning`. + + Warning class for features that will be removed in a future version. 
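+    (It also subclasses :class:`DeprecationWarning`, so the standard deprecation warning filters apply to it.)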
+    """
+
+
+class PytestExperimentalApiWarning(PytestWarning, FutureWarning):
+    """
+    Bases: :class:`pytest.PytestWarning`, :class:`FutureWarning`.
+
+    Warning category used to denote experiments in pytest. Use sparingly as the API might change or even be
+    removed completely in a future version.
+    """
+
+    @classmethod
+    def simple(cls, apiname):
+        return cls(
+            "{apiname} is an experimental api that may change over time".format(
+                apiname=apiname
+            )
+        )
+
+
+class PytestUnhandledCoroutineWarning(PytestWarning):
+    """
+    Bases: :class:`PytestWarning`.
+
+    Warning emitted when pytest encounters a test function which is a coroutine,
+    but it was not handled by any async-aware plugin. Coroutine test functions
+    are not natively supported.
+    """
+
+
+class PytestUnknownMarkWarning(PytestWarning):
+    """
+    Bases: :class:`PytestWarning`.
+
+    Warning emitted on use of unknown markers.
+    See https://docs.pytest.org/en/latest/mark.html for details.
+    """
+
+
+class RemovedInPytest4Warning(PytestDeprecationWarning):
+    """
+    Bases: :class:`pytest.PytestDeprecationWarning`.
+
+    Warning class for features scheduled to be removed in pytest 4.0.
+    """
+
+
+@attr.s
+class UnformattedWarning(object):
+    """Used to hold warnings that need to format their message at runtime, as opposed to a direct message.
+
+    Using this class avoids having to keep all the warning types and messages in this module, preventing misuse.
+    """
+
+    category = attr.ib()
+    template = attr.ib()
+
+    def format(self, **kwargs):
+        """Returns an instance of the warning category, formatted with the given kwargs"""
+        return self.category(self.template.format(**kwargs))
+
+
+PYTESTER_COPY_EXAMPLE = PytestExperimentalApiWarning.simple("testdir.copy_example")
diff --git a/venv/lib/python2.7/site-packages/_pytest/warnings.py b/venv/lib/python2.7/site-packages/_pytest/warnings.py
new file mode 100644
index 0000000..a3debae
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/_pytest/warnings.py
@@ -0,0 +1,180 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import sys
+import warnings
+from contextlib import contextmanager
+
+import pytest
+from _pytest import compat
+
+SHOW_PYTEST_WARNINGS_ARG = "-Walways::pytest.RemovedInPytest4Warning"
+
+
+def _setoption(wmod, arg):
+    """
+    Copy of the warnings._setoption function but does not escape arguments.
+    """
+    parts = arg.split(":")
+    if len(parts) > 5:
+        raise wmod._OptionError("too many fields (max 5): %r" % (arg,))
+    while len(parts) < 5:
+        parts.append("")
+    action, message, category, module, lineno = [s.strip() for s in parts]
+    action = wmod._getaction(action)
+    category = wmod._getcategory(category)
+    if lineno:
+        try:
+            lineno = int(lineno)
+            if lineno < 0:
+                raise ValueError
+        except (ValueError, OverflowError):
+            raise wmod._OptionError("invalid lineno %r" % (lineno,))
+    else:
+        lineno = 0
+    wmod.filterwarnings(action, message, category, module, lineno)
+
+
+def pytest_addoption(parser):
+    group = parser.getgroup("pytest-warnings")
+    group.addoption(
+        "-W",
+        "--pythonwarnings",
+        action="append",
+        help="set which warnings to report, see -W option of python itself.",
+    )
+    parser.addini(
+        "filterwarnings",
+        type="linelist",
+        help="Each line specifies a pattern for "
+        "warnings.filterwarnings. "
+        "Processed after -W and --pythonwarnings.",
+    )
+
+
+def pytest_configure(config):
+    config.addinivalue_line(
+        "markers",
+        "filterwarnings(warning): add a warning filter to the given test. "
+        "see https://docs.pytest.org/en/latest/warnings.html#pytest-mark-filterwarnings ",
+    )
+
+
+@contextmanager
+def catch_warnings_for_item(config, ihook, when, item):
+    """
+    Context manager that catches warnings generated in the contained execution block.
+
+    ``item`` can be None if we are not in the context of an item execution.
+
+    Each warning captured triggers the ``pytest_warning_captured`` hook.
+    """
+    cmdline_filters = config.getoption("pythonwarnings") or []
+    inifilters = config.getini("filterwarnings")
+    with warnings.catch_warnings(record=True) as log:
+
+        if not sys.warnoptions:
+            # if user is not explicitly configuring warning filters, show deprecation warnings by default (#2908)
+            warnings.filterwarnings("always", category=DeprecationWarning)
+            warnings.filterwarnings("always", category=PendingDeprecationWarning)
+
+        warnings.filterwarnings("error", category=pytest.RemovedInPytest4Warning)
+
+        # filters should have this precedence: mark, cmdline options, ini
+        # filters should be applied in the inverse order of precedence
+        for arg in inifilters:
+            _setoption(warnings, arg)
+
+        for arg in cmdline_filters:
+            warnings._setoption(arg)
+
+        if item is not None:
+            for mark in item.iter_markers(name="filterwarnings"):
+                for arg in mark.args:
+                    _setoption(warnings, arg)
+
+        yield
+
+        for warning_message in log:
+            ihook.pytest_warning_captured.call_historic(
+                kwargs=dict(warning_message=warning_message, when=when, item=item)
+            )
+
+
+def warning_record_to_str(warning_message):
+    """Convert a warnings.WarningMessage to a string.
+
+    This takes a lot of unicode shenanigans into account for Python 2.
+    When Python 2 support is dropped this function can be greatly simplified.
+    """
+    warn_msg = warning_message.message
+    unicode_warning = False
+    if compat._PY2 and any(isinstance(m, compat.UNICODE_TYPES) for m in warn_msg.args):
+        new_args = []
+        for m in warn_msg.args:
+            new_args.append(
+                compat.ascii_escaped(m) if isinstance(m, compat.UNICODE_TYPES) else m
+            )
+        unicode_warning = list(warn_msg.args) != new_args
+        warn_msg.args = new_args
+
+    msg = warnings.formatwarning(
+        warn_msg,
+        warning_message.category,
+        warning_message.filename,
+        warning_message.lineno,
+        warning_message.line,
+    )
+    if unicode_warning:
+        warnings.warn(
+            "Warning is using unicode non convertible to ascii, "
+            "converting to a safe representation:\n {!r}".format(compat.safe_str(msg)),
+            UnicodeWarning,
+        )
+    return msg
+
+
+@pytest.hookimpl(hookwrapper=True, tryfirst=True)
+def pytest_runtest_protocol(item):
+    with catch_warnings_for_item(
+        config=item.config, ihook=item.ihook, when="runtest", item=item
+    ):
+        yield
+
+
+@pytest.hookimpl(hookwrapper=True, tryfirst=True)
+def pytest_collection(session):
+    config = session.config
+    with catch_warnings_for_item(
+        config=config, ihook=config.hook, when="collect", item=None
+    ):
+        yield
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_terminal_summary(terminalreporter):
+    config = terminalreporter.config
+    with catch_warnings_for_item(
+        config=config, ihook=config.hook, when="config", item=None
+    ):
+        yield
+
+
+def _issue_warning_captured(warning, hook, stacklevel):
+    """
+    This function should be used instead of calling ``warnings.warn`` directly when we are in the "configure" stage:
+    at this point the actual options might not have been set, so we manually trigger the pytest_warning_captured
+    hook so we can display these warnings in the terminal. This is a hack until we can sort out #2891.
+
+    :param warning: the warning instance.
+    :param hook: the hook caller
+    :param stacklevel: stacklevel forwarded to warnings.warn
+    """
+    with warnings.catch_warnings(record=True) as records:
+        warnings.simplefilter("always", type(warning))
+        warnings.warn(warning, stacklevel=stacklevel)
+    hook.pytest_warning_captured.call_historic(
+        kwargs=dict(warning_message=records[0], when="config", item=None)
+    )
diff --git a/venv/lib/python2.7/site-packages/_ruamel_yaml.so b/venv/lib/python2.7/site-packages/_ruamel_yaml.so
new file mode 100755
index 0000000..220ac9e
Binary files /dev/null and b/venv/lib/python2.7/site-packages/_ruamel_yaml.so differ
diff --git a/venv/lib/python2.7/site-packages/_scandir.so b/venv/lib/python2.7/site-packages/_scandir.so
new file mode 100755
index 0000000..b131754
Binary files /dev/null and b/venv/lib/python2.7/site-packages/_scandir.so differ
diff --git a/venv/lib/python2.7/site-packages/_thread/__init__.py b/venv/lib/python2.7/site-packages/_thread/__init__.py
new file mode 100644
index 0000000..9f2a51c
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/_thread/__init__.py
@@ -0,0 +1,10 @@
+from __future__ import absolute_import
+import sys
+__future_module__ = True
+
+if sys.version_info[0] < 3:
+    from thread import *
+else:
+    raise ImportError('This package should not be accessible on Python 3. '
+                      'Either you are trying to run from the python-future src folder '
+                      'or your installation of python-future is corrupted.')
diff --git a/venv/lib/python2.7/site-packages/_yaml/__init__.py b/venv/lib/python2.7/site-packages/_yaml/__init__.py
new file mode 100644
index 0000000..7baa8c4
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/_yaml/__init__.py
@@ -0,0 +1,33 @@
+# This is a stub package designed to roughly emulate the _yaml
+# extension module, which previously existed as a standalone module
+# and has been moved into the `yaml` package namespace.
+# It does not perfectly mimic its old counterpart, but should get
+# close enough for anyone who's relying on it even when they shouldn't.
+import yaml
+
+# in some circumstances, the yaml module we imported may be from a different version, so we need
+# to tread carefully when poking at it here (it may not have the attributes we expect)
+if not getattr(yaml, '__with_libyaml__', False):
+    from sys import version_info
+
+    exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError
+    raise exc("No module named '_yaml'")
+else:
+    from yaml._yaml import *
+    import warnings
+    warnings.warn(
+        'The _yaml extension module is now located at yaml._yaml'
+        ' and its location is subject to change. To use the'
+        ' LibYAML-based parser and emitter, import from `yaml`:'
+        ' `from yaml import CLoader as Loader, CDumper as Dumper`.',
+        DeprecationWarning
+    )
+    del warnings
+    # Don't `del yaml` here because yaml is actually an existing
+    # namespace member of _yaml.
+
+__name__ = '_yaml'
+# If the module is top-level (i.e. not a part of any specific package)
+# then the attribute should be set to ''.
+# https://docs.python.org/3.8/library/types.html +__package__ = '' diff --git a/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/INSTALLER b/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/METADATA b/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/METADATA new file mode 100644 index 0000000..32dee9b --- /dev/null +++ b/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/METADATA @@ -0,0 +1,195 @@ +Metadata-Version: 2.1 +Name: argusclient +Version: 1.2 +Summary: Minimal client library for Argus webservice REST API +Home-page: https://github.com/SalesforceEng/argusclient +Author: Hari Krishna Dara +Author-email: hdara@salesforce.com +License: BSD-3-Clause +Keywords: argus +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Programming Language :: Python :: 2.7 +Classifier: License :: OSI Approved :: BSD License +Classifier: Intended Audience :: Developers +Classifier: Topic :: System :: Monitoring +Description-Content-Type: text/x-rst +Requires-Dist: requests (>=2.9.1) +Requires-Dist: lxml (>=3.2.3) + +argusclient -- A minimal client library for Argus webservice +------------------------------------------------------------ + +This is a minimal and thin layer of Python client code on top of the +Argus webservices REST API. Most of the library API is 1:1 with that of +REST API so it serves to be more of a convenience than an abstraction. +This means you still need to be familiar with the underlying REST API to +be effective. For more information on the REST API and data model, refer +to the `Argus - User +Guide `__. +Special thanks to `Demian Brecht `__ +for giving a lot of feedback early and helping to shape the API and the +project. + +You can also browse the Python API documentation online at: ``__ + +A quick primer to using argusclient +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Below you will find functional and self-explanatory code that shows how +to do the following: + +- Import the relevant pieces from argusclient +- Create the main entry point and establish login session +- Query for existing namespaces +- Create a new namespace +- Collect metrics and annotations +- Post metrics and annotations +- Query for existing dashboards +- Update or Create dashboard +- Query for existing alerts +- Delete alert +- Create an alert along with a trigger and a notification + +In addition, also look at the bundled example named +``splunk_to_argus.py`` that shows how to extract metrics from Splunk and +push them to Argus. 
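+
+If you already have Argus tokens from an earlier session, the client can also be
+constructed without a password, using the ``refreshToken`` parameter described in the
+API documentation. A minimal sketch (the token file name here is illustrative):
+
+::
+
+    from argusclient import ArgusServiceClient
+
+    with open("refresh_token.txt") as f:
+        refresh_token = f.read().strip()
+
+    argus = ArgusServiceClient("hdara", None,
+                               endpoint="http://localhost:8080/argusws",
+                               refreshToken=refresh_token)
+    argus.login()  # optional; a session is also established on the first request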
+ +Some package imports and initializations that we use later +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +:: + + import sys, os, time, calendar, getpass, logging, random + import lxml.etree + + from argusclient import * + from argusclient.dashboardtags import DASHBOARD, CHART, TITLE, METRIC, FLAGS + + logging.basicConfig() + logging.root.setLevel(logging.INFO) + logging.getLogger("requests").setLevel(logging.WARN) + + endpoint = "http://localhost:8080/argusws" + user = "hdara" + password = None + + tags = { "host": "hdara-wsl" } + fields = { "user": user } + curtm = long(calendar.timegm(time.gmtime()))*1000 + ns_name = "hdara-ns" + ns_access_addl_users = ("hdara",) + dashboard_name = "hdara.test.dashboard" + alert_name = "hdara.test.alert" + scope_name = "hdara" + metric_name = "test" + ans = [] + +Login to the service and establish session +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +:: + + argus = ArgusServiceClient(user, + password or getpass.getpass("SSO password for %s: " % user), + endpoint=endpoint) + logging.info("Logging in") + argus.login() + +Check if a namespace exists and create one if missing +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +:: + + logging.info("Looking up existing namespace with name: %s", ns_name) + nss = dict((ns.qualifier, ns) for ns in argus.namespaces.values()) + ns = nss.get(ns_name) + if not ns: + logging.info("Creating new namespace with name: %s", ns_name) + ns = argus.namespaces.add(Namespace(ns_name)) + +Generate some random metrics against hdara-ns:hdara:test and mark the start and end with annotations. +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +:: + + logging.info("Generating some metric and annotation data for the dashboard") + m = Metric(scope_name, metric_name, tags=tags, namespace=ns_name) + for t in xrange(10, 0, -1): + # Warden requires 1 minute gap between successive data points. 
+ ts = curtm-t*60*1000 + m.datapoints[ts] = random.randint(50, 100) + if not ans or t == 1: + ans.append(Annotation("script", "hdara", "test", ts, ts, "generated", tags=tags, fields=dict(event=ans and "start" or "end", **fields))) + +Send metrics and annotations to Argus +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +:: + + logging.info("Adding metrics data to Argus") + am_resp = argus.metrics.add([m]); + if am_resp.error_count(): + logging.error("Errors reported in metric data: errorCount: %s errorMessages: %s", am_resp.error_count(), am_resp.error_messages()) + logging.info("Adding annotation data to Argus") + an_resp = argus.annotations.add(ans) + if an_resp.error_count(): + logging.error("Errors reported in annotation data: errorCount: %s errorMessages: %s", an_resp.error_count(), an_resp.error_messages()) + +Generate dashboard content +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +:: + + mquery = str(MetricQuery(scope_name, metric_name, "sum", tags=tags, stTimeSpec="-1d", enTimeSpec="-0d", namespace=ns_name)) + aquery = str(AnnotationQuery(scope_name, metric_name, "generated", tags=tags, stTimeSpec="-1d", enTimeSpec="-0d")) + content = lxml.etree.tostring(DASHBOARD( + CHART( + TITLE("hdara.test"), + METRIC(mquery, name="hdara.test.metric"), + FLAGS(aquery, name="hdara.test.annotation"), + name="Chart" + ) + ), method="html") + dashbobj.content = content + +Update or Create dashboard +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +:: + + logging.info("Looking up existing dashboard with name: %s", dashboard_name) + dashbobj = argus.dashboards.get_user_dashboard(user, dashboard_name, shared=False) + if not dashbobj: + logging.info("Creating new dashboard with name: %s", dashboard_name) + dashbobj = Dashboard(dashboard_name, content, shared=True, description="A new dashboard") + dashbobj = argus.dashboards.add(dashbobj) + else: + logging.info("Updating dashboard with name: %s id %s", dashboard_name, dashbobj.argus_id) + dashbobj.content = content + argus.dashboards.update(dashbobj.argus_id, dashbobj) + logging.info("Dashboard url: %s", os.path.join(os.path.dirname(endpoint), "argus/#/dashboards", str(dashbobj.argus_id)).replace("-ws", "-ui")) + +Look for an existing alert and delete it so that we can recreate it +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +:: + + logging.info("Looking up existing alert with name: %s owned by user: %s", alert_name, user) + alertobj = argus.alerts.get_user_alert(user, alert_name, shared=False) + if alertobj: + logging.info("Deleting existing alert with name: %s id: %s", alert_name, alertobj.argus_id) + argus.alerts.delete(alertobj.argus_id) + +Finally, create alert with a trigger and a notification +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +:: + + logging.info("Creating new alert with alert name: %s", alert_name) + alertobj = argus.alerts.add(Alert(alert_name, mquery, "* */1 * * *", + trigger=Trigger("hdara.test.trigger", Trigger.GREATER_THAN, 100000, 600000), + notification=Notification("hdara.test.notification", Notification.EMAIL, subscriptions=["hdara@salesforce.com"]), + shared=True)) + + diff --git a/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/RECORD b/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/RECORD new file mode 100644 index 0000000..30a84eb --- /dev/null +++ b/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/RECORD @@ -0,0 +1,16 @@ +argusclient-1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 
+argusclient-1.2.dist-info/METADATA,sha256=Ft4DmagGYMeIbKiL0_XKvl0YonDXN66BwRlKS5Fg0t0,7584 +argusclient-1.2.dist-info/RECORD,, +argusclient-1.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +argusclient-1.2.dist-info/WHEEL,sha256=bK8TJl-oUKFDa18qkB68zwTZhIBCifqi4qjS_NS4aFQ,92 +argusclient-1.2.dist-info/entry_points.txt,sha256=FjKvgvGyIyqokCxiGIVCvaGmkWcXwEXZ8W6zk1mtvZQ,37 +argusclient-1.2.dist-info/top_level.txt,sha256=2sL6dps7f8axrTSXHByLdxGdHzMwvORIhSvcZYgW1_M,12 +argusclient-1.2.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +argusclient/__init__.py,sha256=CIAg4qNKDDU6uIlZIs1WCjq2ESSQ7leHKZ_rNBfNDyg,474 +argusclient/__init__.pyc,, +argusclient/client.py,sha256=9qZnfhy7SHGF5g_9doe3HXFWaaNkHyQTuduVHHNh_2I,42259 +argusclient/client.pyc,, +argusclient/dashboardtags.py,sha256=rv09SR5Eya8VBWAdPDIP2G1cp4xjgs8V86OPhH4Hq10,4991 +argusclient/dashboardtags.pyc,, +argusclient/model.py,sha256=aSWJ62WM3JDdksfKVFHkuWq1AJNCo89YXQqd3Fs3jrE,18491 +argusclient/model.pyc,, diff --git a/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/REQUESTED b/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/WHEEL b/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/WHEEL new file mode 100644 index 0000000..60b427d --- /dev/null +++ b/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py2-none-any + diff --git a/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/entry_points.txt b/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/entry_points.txt new file mode 100644 index 0000000..5d3e5f6 --- /dev/null +++ b/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/entry_points.txt @@ -0,0 +1,3 @@ + + # -*- Entry points: -*- + \ No newline at end of file diff --git a/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/top_level.txt b/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/top_level.txt new file mode 100644 index 0000000..c56aa30 --- /dev/null +++ b/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/top_level.txt @@ -0,0 +1 @@ +argusclient diff --git a/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/zip-safe b/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/venv/lib/python2.7/site-packages/argusclient-1.2.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/venv/lib/python2.7/site-packages/argusclient/__init__.py b/venv/lib/python2.7/site-packages/argusclient/__init__.py new file mode 100644 index 0000000..2cabd66 --- /dev/null +++ b/venv/lib/python2.7/site-packages/argusclient/__init__.py @@ -0,0 +1,10 @@ + +# +# Copyright (c) 2016, salesforce.com, inc. +# All rights reserved. +# Licensed under the BSD 3-Clause license. 
+# For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause +# + +from .client import ArgusServiceClient, ArgusException, ArgusAuthException, ArgusObjectNotFoundException, MetricQuery, AnnotationQuery +from .model import Namespace, Metric, Annotation, Dashboard, Alert, Trigger, Notification, User, AddListResult diff --git a/venv/lib/python2.7/site-packages/argusclient/client.py b/venv/lib/python2.7/site-packages/argusclient/client.py new file mode 100644 index 0000000..a981b8d --- /dev/null +++ b/venv/lib/python2.7/site-packages/argusclient/client.py @@ -0,0 +1,955 @@ +""" +This modules contains various client classes to interact with the Argus RESTful webservice endpoints. +The implementation is based on API documentation from ``/help`` on various endpoints +and `web service reference `__. +""" + +# +# Copyright (c) 2016, salesforce.com, inc. +# All rights reserved. +# Licensed under the BSD 3-Clause license. +# For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause +# +import unicodedata +from collections import Mapping + +import requests +import json +import os +import logging +import collections +try: + import http.client as httplib # Python 3 +except ImportError: + import httplib # Python 2 +from functools import wraps + +from .model import Namespace, Metric, Annotation, Dashboard, Alert, Trigger, Notification, JsonEncoder, JsonDecoder, \ + Permission + +REQ_METHOD = "req_method" +REQ_PATH = "req_path" +REQ_PARAMS = "req_params" +REQ_BODY = "req_body" + +class ArgusException(Exception): + """ + An exception type that is thrown for Argus service errors. + """ + pass + +class ArgusAuthException(ArgusException): + """ + An exception type that is thrown for Argus authentication errors. + """ + pass + +class ArgusObjectNotFoundException(ArgusException): + """ + An exception type that is thrown for Argus object not found errors. + """ + pass + +class BaseQuery(object): + def __init__(self, baseExpr, *tailParams, **kwargs): + self.baseExpr = baseExpr + self.stTimeSpec = kwargs.get("stTimeSpec", None) + self.enTimeSpec = kwargs.get("enTimeSpec", None) + self.tailParams = tuple([t for t in tailParams if t]) # Filter None's. + assert self.stTimeSpec or self.enTimeSpec, "One of start or end time specifications should be non-empty" + + def __str__(self): + """ + Return string representation of the query that can be used with an Argus query. A metric query has the format: + ``-1d:-0d:scope:metric[{tagk=tagv,...}]:downsample[:aggregator][:namespace]``. An annotation query has the format + ``-1d:-0d:scope:metric[{tagk=tagv,...}]:source``. + """ + query = ":".join(str(q) for q in (self.stTimeSpec, self.enTimeSpec, self.baseExpr) + self.tailParams if q) + return query + + def getQueryParams(self): + return dict(expression=str(self)) + + +class MetricQuery(BaseQuery): + """ + This class is used to construct the query string for sending metric queries to Argus. + + >>> from argusclient.client import MetricQuery + >>> mquery = MetricQuery("test.scope", "test.metric", "sum", tags={ "test.tag": "test.value" }, stTimeSpec="-1d", enTimeSpec="-0d", namespace="test.namespace") + >>> print str(mquery) + -1d:-0d:test.scope:test.metric{test.tag=test.value}:sum:test.namespace + """ + def __init__(self, scope, metric, aggregator, tags=None, namespace=None, downsampler=None, stTimeSpec=None, enTimeSpec=None): + # NOTE: Namespace no longer goes into the metric expression, so we pass it down as a tail parameter. 
+ super(MetricQuery, self).__init__(str(Metric(scope, metric, tags=tags)), aggregator, downsampler, namespace, stTimeSpec=stTimeSpec, enTimeSpec=enTimeSpec) + + +class AnnotationQuery(BaseQuery): + """ + This class is used to construct the query string for sending annotations queries to Argus. + + >>> from argusclient.client import AnnotationQuery + >>> mquery = AnnotationQuery("test.scope", "test.metric", "test.source", tags={ "test.tag": "test.value" }, stTimeSpec="-1d", enTimeSpec="-0d") + >>> print str(mquery) + -1d:-0d:test.scope:test.metric{test.tag=test.value}:test.source + """ + def __init__(self, scope, metric, source, tags=None, stTimeSpec=None, enTimeSpec=None): + super(AnnotationQuery, self).__init__(str(Annotation(source, scope, metric, None, None, None, tags=tags)), stTimeSpec=stTimeSpec, enTimeSpec=enTimeSpec) + + +class BaseCollectionServiceClient(object): + def __init__(self, query_type, obj_type, argus, query_path, coll_path): + self.query_type = query_type + self.obj_type = obj_type + self.argus = argus + self.query_path = query_path + self.coll_path = coll_path + + def query(self, query): + """ + Returns the list of data matching the given query. + """ + if not query: raise ValueError("need a value for query parameter") + if not isinstance(query, self.query_type): raise TypeError("query needs to be of type: %s" % self.query_type) + return self.argus._request("get", self.query_path, params=query.getQueryParams()) + + def add(self, data): + """ + Sends data to the collection service. + + :return: :class:`argusclient.model.AddListResult` object with a summary of the operation. + """ + if not data: raise ValueError("need a value for data parameter") + if not isinstance(data, list) or not isinstance(data[0], self.obj_type): raise TypeError("data should be a list of %s objects" % self.obj_type) + return self.argus._request("post", self.coll_path, dataObj=data) + + +class MetricCollectionServiceClient(BaseCollectionServiceClient): + """ + Service class that interfaces with the Argus metrics collection endpoint. + + There is no need to instantiate this directly, as it is available as :attr:`argusclient.client.ArgusServiceClient.metrics` attribute. + """ + def __init__(self, argus): + super(MetricCollectionServiceClient, self).__init__(MetricQuery, Metric, argus, "metrics", "collection/metrics") + + +class AnnotationCollectionServiceClient(BaseCollectionServiceClient): + """ + Service class that interfaces with the Argus annotations collection endpoint. + + There is no need to instantiate this directly, as it is available as :attr:`argusclient.client.ArgusServiceClient.annotations` attribute. + """ + def __init__(self, argus): + super(AnnotationCollectionServiceClient, self).__init__(AnnotationQuery, Annotation, argus, "annotations", "collection/annotations") + + +class BaseModelServiceClient(object): + def __init__(self, argus, get_all_req_opts=None): + """ + :param get_all_req_opts: Dict holding request details for a 'get-all alerts/dashboards/etc' request. + Currently supported fields are REQ_METHOD, REQ_PATH, REQ_PARAMS, and REQ_BODY. 
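+            For example (an illustrative value): ``{REQ_METHOD: "get", REQ_PATH: "alerts/allinfo", REQ_PARAMS: {"shared": False}}``.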
:type get_all_req_opts: dict
+        """
+        self.argus = argus
+        self._retrieved_all = False
+        self._coll = {}
+        self.get_all_req_opts = get_all_req_opts or {}
+
+    def _init_all(self, coll=None):
+        if not self.get_all_req_opts.get(REQ_PATH):
+            raise TypeError("Unsupported operation on: %s" % type(self))
+        if not self._retrieved_all:
+            self._coll = dict((obj.argus_id, self._fill(obj))
+                              for obj in (coll or self.argus._request(self.get_all_req_opts.get(REQ_METHOD, "get"),
+                                                                      self.get_all_req_opts.get(REQ_PATH, None),
+                                                                      params=self.get_all_req_opts.get(REQ_PARAMS, None),
+                                                                      dataObj=self.get_all_req_opts.get(REQ_BODY, None))))
+            self._retrieved_all = True
+
+    def _fill(self, obj):
+        return obj
+
+    def get(self, id):
+        """
+        Return the object for the specified id. If the object is not already in the local collection, a one-time attempt will be made to load all objects from Argus.
+        """
+        # By default, load all, unless the id is already there.
+        if id not in self._coll:
+            self._init_all()
+        return self._coll[id]
+
+    def update(self, key, value):
+        raise TypeError("Unsupported operation on: %s" % type(self))
+
+    def delete(self, key):
+        raise TypeError("Unsupported operation on: %s" % type(self))
+
+    def items(self):
+        """
+        Returns the list of (id, object) pairs as tuples, works like the corresponding method on a dict.
+        Calling this method may result in sending a request to Argus to fetch all relevant objects.
+        """
+        self._init_all()
+        return self._coll.items()
+
+    def keys(self):
+        """
+        Returns the list of ids, just like the corresponding method on a dict.
+        Calling this method may result in sending a request to Argus to fetch all relevant objects.
+        """
+        self._init_all()
+        # Return the ids, not the (id, object) pairs.
+        return self._coll.keys()
+
+    def values(self):
+        """
+        Returns the list of objects, just like the corresponding method on a dict.
+        Calling this method may result in sending a request to Argus to fetch all relevant objects.
+        """
+        self._init_all()
+        return self._coll.values()
+
+    def __iter__(self):
+        """
+        Returns an iterator of the keys, just like the corresponding method on a dict.
+        Calling this method may result in sending a request to Argus to fetch all relevant objects.
+        """
+        self._init_all()
+        return iter(self._coll)
+
+    def __len__(self):
+        """
+        Returns the number of objects, just like the corresponding method on a dict.
+        Calling this method may result in sending a request to Argus to fetch all relevant objects.
+        """
+        self._init_all()
+        return len(self._coll)
+
+    def __getitem__(self, id):
+        return self.get(id)
+
+    def __setitem__(self, key, value):
+        raise ValueError("You can't modify this list directly, use the add(), delete() and update() methods instead")
+
+    def __delitem__(self, key):
+        raise ValueError("You can't modify this list directly, use the add(), delete() and update() methods instead")
+
+    def __contains__(self, key):
+        self._init_all()
+        return key in self._coll
+
+
+class UsersServiceClient(BaseModelServiceClient):
+    """
+    Service class that interfaces with the Argus users endpoint.
+
+    There is no need to instantiate this directly, as it is available as :attr:`argusclient.client.ArgusServiceClient.users` attribute.
+    """
+    def __init__(self, argus):
+        super(UsersServiceClient, self).__init__(argus)
+        self._coll_by_name = {}
+
+    def get(self, key):
+        """
+        Return the User for the specified id/username. If the object is not already in the local collection, an attempt will be made to retrieve it from Argus.
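+
+        A usage sketch (assumes an authenticated ``argus`` client; values are illustrative):
+
+        >>> u = argus.users.get("hdara")  # lookup by username  # doctest: +SKIP
+        >>> u = argus.users.get(u.id)     # or by numeric id  # doctest: +SKIP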
+ """ + if not key: raise ValueError("Need username or id") + if isinstance(key, int) or key.isdigit(): + id = int(key) + if id not in self._coll: + u = self._fill(self.argus._request("get", "users/id/%s" % id)) + self._coll[id] = u + self._coll_by_name[u.userName] = u + return self._coll[id] + else: + if key not in self._coll_by_name: + u = self._fill(self.argus._request("get", "users/username/%s" % key)) + self._coll_by_name[key] = u + self._coll[u.id] = u + return self._coll_by_name[key] + + +class NamespacesServiceClient(BaseModelServiceClient): + """ + Service class that interfaces with the Argus namespaces endpoint. + + There is no need to instantiate this directly, as it is available as :attr:`argusclient.client.ArgusServiceClient.namespaces` attribute. + """ + def __init__(self, argus): + super(NamespacesServiceClient, self).__init__(argus, get_all_req_opts={REQ_PATH: "namespace"}) + + def update(self, id, namespace): + """ + Updates the specified namespace. + + :return: the updated :class:`argusclient.model.Namespace` with all fields populated. + """ + if not id: raise ValueError("Need to specify an id to update namespace") + id = int(id) + if not namespace.argus_id: raise ValueError("Namespace needs an id to update") + if id != namespace.argus_id: raise ValueError("Namespace id: %s doesn't match the id: %s that you are updating" % (namespace.id, id)) + self._coll[id] = self.argus._request("put", "namespace/%s" % id, dataObj=namespace) + return self._coll[id] + + def add(self, namespace): + """ + Adds the namespace. + + :return: the new :class:`argusclient.model.Namespace` with all fields populated. + """ + if not isinstance(namespace, Namespace): raise TypeError("Need a Namespace object, got: %s" % type(namespace)) + if namespace.argus_id: raise ValueError("A new namespace can't have an id") + ns = self._fill(self.argus._request("post", "namespace", dataObj=namespace)) + self._coll[ns.id] = ns + return ns + + def update_users(self, namespaceid, *users): + """ + Updates the namespace with the specified users. + + :return: the updated :class:`argusclient.model.Namespace` with all fields populated. + """ + if not namespaceid: raise ValueError("Need to specify a namespaceid") + self._coll[namespaceid] = self.argus._request("put", "namespace/%s/users" % namespaceid, dataObj=users) + return self._coll[namespaceid] + + +class BaseUpdatableModelServiceClient(BaseModelServiceClient): + def __init__(self, objType, argus, id_path, get_all_req_opts=None): + super(BaseUpdatableModelServiceClient, self).__init__(argus, get_all_req_opts=get_all_req_opts) + self.objType = objType + self.id_path = id_path + + def get(self, id): + """ + Gets the item with specified id. This method retrieves it from Argus, if the object is not already available in the local collection. + """ + if id is None: raise ValueError("Need to specify an id to get item") + id = int(id) + if id not in self._coll: + self._coll[id] = self._fill(self.argus._request("get", self.id_path % id)) + return self._coll[id] + + def update(self, id, obj): + """ + Updates the specified item on Argus as well as in the local collection. + + :return: the updated object with all fields populated. + """ + if not id: raise ValueError("Need to specify an id to update item") + id = int(id) + if not isinstance(obj, self.objType): raise TypeError("Need an object of type: %s" % self.objType) + if not obj.argus_id: raise ValueError("Object needs an id to update") + # Ensure that user doesn't accidentally copy another item. 
+ if id != obj.argus_id: raise ValueError("Object id: %s doesn't match the id: %s that you are updating" % (obj.id, id)) + self._coll[id] = self.argus._request("put", self.id_path % id, dataObj=obj) + return self._coll[id] + + def delete(self, id): + """ + Deletes the object from Argus service and also from this collection (if exists). + """ + if not id: raise ValueError("Need to specify an id to delete item") + id = int(id) + self.argus._request("delete", self.id_path % id) + if id in self._coll: + del self._coll[id] + + +class DashboardsServiceClient(BaseUpdatableModelServiceClient): + """ + Service class that interfaces with the Argus dashboards endpoint. + + There is no need to instantiate this directly, as it is available as :attr:`argusclient.client.ArgusServiceClient.dashboards` attribute. + """ + def __init__(self, argus, get_all_req_opts=None): + """ + :param get_all_req_opts: See BaseModelServiceClient.__init__() for description. + """ + if not get_all_req_opts: + get_all_req_opts = {} + get_all_req_opts.setdefault(REQ_PATH, "dashboards") + super(DashboardsServiceClient, self).__init__(Dashboard, argus, id_path="dashboards/%s", + get_all_req_opts=get_all_req_opts) + + def add(self, dashboard): + """ + Adds the dashboard. + + :return: the :class:`argusclient.model.Dashboard` object with all fields populated. + """ + if not isinstance(dashboard, Dashboard): raise TypeError("Need a Dashboard object, got: %s" % type(dashboard)) + if dashboard.argus_id: raise ValueError("A new dashboard can't have an id") + db = self._fill(self.argus._request("post", "dashboards", dataObj=dashboard)) + self._coll[db.id] = db + return db + + def get_user_dashboard(self, ownerName, dashboardName, shared=True): + """ + Looks up a dashboard with its name and owner. Returns `None` if not found. + + :return: the :class:`argusclient.model.Dashboard` object with all fields populated. + """ + assert dashboardName, "Expected a dashboard name" + assert ownerName, "Expected a owner name" + dashboards = self.argus._request("get", "dashboards", params=dict(dashboardName=dashboardName, owner=ownerName, shared=shared)) + if not dashboards: + return None + else: + assert len(dashboards) == 1, "Expected a single dashboard as a result, but got: %s" % len(dashboards) + return dashboards[0] + + def get_user_dashboards(self, ownerName=None, shared=True, limit=None, version=None): + """ + Gets dashboards owned by ownerName. + If ownerName is not passed in, the username used during login is used. + + :return: a list of :class:`argusclient.model.Dashboard` objects with all fields populated. + """ + return self.argus._request("get", "dashboards", params=dict(owner=ownerName, shared=shared, limit=limit, version=version)) + +class PermissionsServiceClient(BaseUpdatableModelServiceClient): + """ + Service class that interfaces with the Argus permissions endpoint. + + There is no need to instantiate this directly, as it is available as :attr:`argusclient.client.ArgusServiceClient.permissions` attribute. + """ + def __init__(self, argus, get_all_req_opts=None): + """ + :param get_all_req_opts: See BaseModelServiceClient.__init__() for description. 
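+
+        A usage sketch (assumes an authenticated ``argus`` client; the entity id is illustrative):
+
+        >>> perms_by_id = argus.permissions.get_permissions_for_entities([1234])  # doctest: +SKIP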
+ """ + if not get_all_req_opts: + get_all_req_opts = {} + get_all_req_opts.setdefault(REQ_METHOD, "get") + get_all_req_opts[REQ_PATH] = "permission/" + get_all_req_opts.get(REQ_PATH, "") + super(PermissionsServiceClient, self).__init__(Permission, argus, id_path="permission/%s", + get_all_req_opts=get_all_req_opts) + + def _init_all(self, coll=None): + if not self.get_all_req_opts.get(REQ_PATH): + raise TypeError("Unsupported operation on: %s" % type(self)) + if not self._retrieved_all: + resp = convert(self.argus._request(self.get_all_req_opts.get(REQ_METHOD, "get"), + self.get_all_req_opts.get(REQ_PATH, None), + params=self.get_all_req_opts.get(REQ_PARAMS, None), + dataObj=self.get_all_req_opts.get(REQ_BODY, None))) + for id, perms in resp.items(): + self._coll[id] = perms + self._retrieved_all = True + + def get_permissions_for_entities(self, entityIds): + """ + Gets permissions that are associated with the given entity id's. + + :return: a dict of entity id's mapped to a list of :class:`argusclient.model.Permission` objects + """ + return convert(self.argus._request("post", "permission/entityIds", dataObj=entityIds)) + +def convert(input): + if isinstance(input, Mapping): + return {convert(key): convert(value) for key, value in input.iteritems()} + elif isinstance(input, list): + return [convert(element) for element in input] + elif isinstance(input, basestring): + ret = str(input) + if ret.isdigit(): + ret = int(ret) + return ret + else: + return input + + +class AlertsServiceClient(BaseUpdatableModelServiceClient): + """ + Service class that interfaces with the Argus alerts endpoint. + + There is no need to instantiate this directly, as it is available as :attr:`argusclient.client.ArgusServiceClient.alerts` attribute. + + .. attribute:: triggers + + :class:`argusclient.client.AlertTriggersServiceClient` + + Interfaces with the Argus alert triggers endpoint. + + .. attribute:: notifications + + :class:`argusclient.client.AlertNotificationsServiceClient` + + Interfaces with the Argus alert notifications endpoint. + + """ + def __init__(self, argus, get_all_req_opts=None): + """ + :param get_all_req_opts: See BaseModelServiceClient.__init__() for description. + """ + if not get_all_req_opts: + get_all_req_opts = {} + get_all_req_opts[REQ_PATH] = "alerts/" + get_all_req_opts.get(REQ_PATH, "") + super(AlertsServiceClient, self).__init__(Alert, argus, id_path="alerts/%s", get_all_req_opts=get_all_req_opts) + + def _fill(self, alert): + alert._triggers = AlertTriggersServiceClient(self.argus, alert) + alert._notifications = AlertNotificationsServiceClient(self.argus, alert) + return alert + + def add(self, alert): + """ + Adds the alert. + + :return: the :class:`argusclient.model.Alert` object with all fields populated. 
+ """ + if not isinstance(alert, Alert): raise TypeError("Need a Alert object, got: %s" % type(alert)) + if alert.argus_id: raise ValueError("A new alert can't have an id") + alertobj = self._fill(self.argus._request("post", "alerts", dataObj=alert)) + self._coll[alertobj.id] = alertobj + if alert.trigger: + alertobj.trigger = alertobj.triggers.add(alert.trigger) + if alert.notification: + alertobj.notification = alertobj.notifications.add(alert.notification) + if alert.trigger and alert.notification: + self.argus.alerts.add_notification_trigger(alertobj.id, alertobj.notification.id, alertobj.trigger.id) + alertobj.notification.triggersIds = [alertobj.trigger.id] + alertobj.trigger.notificationsIds = [alertobj.notification.id] + return alertobj + + def update(self, id, alert): + """ + Updates the specified alert. + + :return: the updated :class:`argusclient.model.Alert` object with all fields populated. + """ + return self._fill(super(AlertsServiceClient, self).update(id, alert)) + + def get_notification_triggers(self, alertid, notificationid): + """ + Return all triggers that are associated with the specified notification as a list. + + :return: the :class:`list` of :class:`argusclient.model.Trigger` object with all fields populated. + """ + if not alertid: raise ValueError("Need to specify an alertid") + if not notificationid: raise ValueError("Need to specify a notificationid") + # TODO: Update self._coll + return self.argus._request("get", "alerts/%s/notifications/%s/triggers" % (alertid, notificationid)) + + def get_notification_trigger(self, alertid, notificationid, triggerid): + """ + Returns the trigger only if it is associated with the specified notification. + + :return: the :class:`argusclient.model.Trigger` object with all fields populated. + """ + if not alertid: raise ValueError("Need to specify an alertid") + if not notificationid: raise ValueError("Need to specify a notificationid") + if not triggerid: raise ValueError("Need to specify a triggerid") + # TODO: Update self._coll + return self.argus._request("get", "alerts/%s/notifications/%s/triggers/%s" % (alertid, notificationid, triggerid)) + + def add_notification_trigger(self, alertid, notificationid, triggerid): + """ + Associates the trigger with the specified notification. + + :return: the :class:`argusclient.model.Trigger` with all fields populated. + """ + if not alertid: raise ValueError("Need to specify an alertid") + if not notificationid: raise ValueError("Need to specify a notificationid") + if not triggerid: raise ValueError("Need to specify a triggerid") + # TODO: Update self._coll + return self.argus._request("post", "alerts/%s/notifications/%s/triggers/%s" % (alertid, notificationid, triggerid)) + + def delete_notification_trigger(self, alertid, notificationid, triggerid): + """ + Disassociates the trigger with the specified notification. This method has no return value. + """ + if not alertid: raise ValueError("Need to specify an alertid") + if not notificationid: raise ValueError("Need to specify a notificationid") + if not triggerid: raise ValueError("Need to specify a triggerid") + # TODO: Update self._coll + self.argus._request("delete", "alerts/%s/notifications/%s/triggers/%s" % (alertid, notificationid, triggerid)) + + def get_user_alert(self, ownerName, alertName, shared=True): + """ + Looks up an alert with its name and owner. Returns `None` if not found. + + :return: the :class:`argusclient.model.Alert` object with all fields populated. 
+ """ + assert alertName, "Expected an alert name" + assert ownerName, "Expected a owner name" + alerts = self.argus._request("get", "alerts/meta", params=dict(alertname=alertName, ownername=ownerName, shared=shared)) + if not alerts: + return None + else: + assert len(alerts) == 1, "Expected a single alert as a result, but got: %s" % [a.name for a in alerts] + return alerts[0] + + def get_alerts_allinfo(self, ownerName=None, alertNameContains=None, shared=False, limit=None): + """ + If ownerName is not passed in, the username used during login is used. + Calls the GET /alerts/allinfo endpoint. + Returns the list of alerts (including associated notifications and triggers) created by the user. + + :return: the list of :class:`argusclient.model.Alert` objects, with all fields populated, including triggers and notifications + """ + return self.argus._request("get", "alerts/allinfo", params=dict(ownername=ownerName, alertNameContains=alertNameContains, shared=shared, limit=limit)) + + ''' + Functions to enable support for composite alerts + ''' + + def get_composite_alert_children(self, comp_alert_id): + """ + Get list of child alerts for a composite alert + :param comp_alert_id: ID of an argus composite alert + :type comp_alert_id: integer + :return: list of :class:`argusclient.model.Alert` object with all fields populated. + """ + + if not comp_alert_id: raise ValueError("Need to specify comp_alert_id") + if not isinstance(comp_alert_id, int): raise TypeError("Need an Alert ID, got: %s" % type(comp_alert_id)) + + uri_path = "alerts/{}/children".format(comp_alert_id) + child_alerts = self.argus._request("get", uri_path) + child_alerts = [self._fill(child_alert) for child_alert in child_alerts] + return child_alerts + + def get_composite_alert_children_info(self, comp_alert_id): + """ + Get information for all children (child alerts + triggers associated with them) of a composite alert + + :param comp_alert_id: ID of an argus composite alert + :type comp_alert_id: integer + :return: list of child alerts information (alertid, alertname, triggerids, triggernames etc) + """ + + if not comp_alert_id: raise ValueError("Need to specify comp_alert_id") + if not isinstance(comp_alert_id, int): raise TypeError("Need an Alert ID, got: %s" % type(comp_alert_id)) + + uri_path = "alerts/{}/children/info".format(comp_alert_id) + return self.argus._request("get", uri_path) + + + def add_child_alert_to_composite_alert(self, comp_alert_id, alert): + """ + Add child alert to a composite alert + + :param comp_alert_id: ID of a composite alert + :type comp_alert_id: Integer + + :param alert: alert definition + :type alert: class:`argusclient.model.Alert` object + + :return: newly created child alert object of type class:`argusclient.model.Alert` + """ + if not comp_alert_id: raise ValueError("Need to specify a composite alert id") + if not alert: raise ValueError("Need to specify an Alert object") + if not isinstance(comp_alert_id, int): raise TypeError("Need an Alert ID, got: %s" % type(comp_alert_id)) + if not isinstance(alert, Alert): raise TypeError("Need an Alert object, got: %s" % type(alert)) + + uri_path = "alerts/{}/children".format(comp_alert_id) + alert_obj = self._fill(self.argus._request("post", uri_path, dataObj=alert)) + self._coll[alert_obj.id] = alert_obj + return alert_obj + + + def delete_child_alert_from_composite_alert(self, comp_alert_id, child_alert_id): + """ + Delete a child alert from a composite alert + + :param comp_alert_id: ID of a composite alert + :type comp_alert_id: Integer + + 
:param child_alert_id: ID of a child alert + :type child_alert_id: Integer + """ + if not comp_alert_id: raise ValueError("Need to specify a composite alert id") + if not child_alert_id: raise ValueError("Need to specify a child alert id") + if not isinstance(comp_alert_id, int): raise TypeError("Need a composite Alert ID, got: %s" % type(comp_alert_id)) + if not isinstance(child_alert_id, int): raise TypeError("Need an Alert ID, got: %s" % type(child_alert_id)) + + uri_path = "alerts/{}/children/{}".format(comp_alert_id, child_alert_id) + if child_alert_id in self._coll: + del self._coll[child_alert_id] + return self.argus._request("delete", uri_path) + + +class AlertTriggersServiceClient(BaseUpdatableModelServiceClient): + """ + Service class that interfaces with the Argus alert triggers endpoint. + + There is no need to instantiate this directly, as it is available as :attr:`argusclient.client.AlertsServiceClient.triggers` attribute. + """ + + def __init__(self, argus, alert): + assert alert, "Expected an alert at this point" + assert alert.id, "Alert expected to have an id at this point" + super(AlertTriggersServiceClient, self).__init__(Trigger, argus, id_path="alerts/%s/triggers/%%s" % alert.id, + get_all_req_opts={REQ_PATH: "alerts/%s/triggers" % alert.id}) + self.alert = alert + if alert.triggers: + self._init_all(alert.triggers) + + def add(self, trigger): + """ + Adds the trigger to this alert. + + :return: the added :class:`argusclient.model.Trigger` with all fields populated. + """ + if not isinstance(trigger, Trigger): raise TypeError("Need a Trigger object, got: %s" % type(trigger)) + if trigger.argus_id: raise ValueError("A new Trigger can't have an id") + triggers = self.argus._request("post", "alerts/%s/triggers" % self.alert.id, dataObj=trigger) + self._init_all(triggers) + self.alert.triggerIds = [t.argus_id for t in triggers] + try: + return next(t for t in triggers if t.name == trigger.name) + except StopIteration: + raise ArgusException("This is unexpected... trigger: %s not found after successfully adding it" % trigger.name) + + def delete(self, id): + super(AlertTriggersServiceClient, self).delete(id) + self.alert.triggerIds = list(self._coll.keys()) + + +class AlertNotificationsServiceClient(BaseUpdatableModelServiceClient): + """ + Service class that interfaces with the Argus alert notifications endpoint. + + There is no need to instantiate this directly, as it is available as :attr:`argusclient.client.AlertsServiceClient.notifications` attribute. + """ + def __init__(self, argus, alert): + assert alert, "Expected an alert at this point" + assert alert.id, "Alert expected to have an id at this point" + super(AlertNotificationsServiceClient, self).__init__(Notification, argus, id_path="alerts/%s/notifications/%%s" % alert.id, + get_all_req_opts={REQ_PATH: "alerts/%s/notifications" % alert.id}) + self.alert = alert + if alert.notifications: + self._init_all(alert.notifications) + + def add(self, notification): + """ + Adds the notification to this alert. + + :return: the added :class:`argusclient.model.Notification` with all fields populated. 
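+
+        A usage sketch (assumes ``alertobj`` came from ``argus.alerts.add()`` or ``argus.alerts.get()``; names are illustrative):
+
+        >>> n = alertobj.notifications.add(Notification("my.notification", Notification.EMAIL,
+        ...                                             subscriptions=["me@example.com"]))  # doctest: +SKIP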
+ """ + if not isinstance(notification, Notification): raise TypeError("Need a Notification object, got: %s" % type(notification)) + if notification.argus_id: raise ValueError("A new Notification can't have an id") + notifications = self.argus._request("post", "alerts/%s/notifications" % self.alert.id, dataObj=notification) + self._init_all(notifications) + self.alert.notificationIds = [n.argus_id for n in notifications] + try: + return next(n for n in notifications if n.name == notification.name) + except StopIteration: + raise ArgusException("This is unexpected... notification: %s not found after successfully adding it" % notification.name) + + def delete(self, id): + super(AlertNotificationsServiceClient, self).delete(id) + self.alert.notificationIds = list(self._coll.keys()) + + +def retry_auth(f): + @wraps(f) + def with_retry(*args, **kwargs): + try_cnt = 0 + while True: + try_cnt += 1 + try: + return f(*args, **kwargs) + except ArgusAuthException as ex: + if try_cnt >= 2: + raise + else: + logging.debug("Got auth exception, but will retry", exc_info=True) + + return with_retry + + +def auto_auth(f): + @wraps(f) + def with_auth_token(*args, **kwargs): + argus = args[0] + if not argus.accessToken and argus.refreshToken: + try: + res = argus._request_no_auth("post", + "v2/auth/token/refresh", + dataObj=dict(refreshToken=argus.refreshToken)) + argus.accessToken = res["accessToken"] + except ArgusAuthException: + if argus.password: + logging.debug("Token refresh failed, will attempt a fresh login", exc_info=True) + else: + raise + if not argus.accessToken and argus.password: + argus.refreshToken = None + res = argus._request_no_auth("post", + "v2/auth/login", + dataObj=dict(username=argus.user, password=argus.password)) + argus.refreshToken, argus.accessToken = res["refreshToken"], res["accessToken"] + + try: + return f(*args, **kwargs) + except ArgusAuthException: + argus.accessToken = None + raise + + return with_auth_token + + +class ArgusServiceClient(object): + """ + This is the main class to interact with the Argus webservice. + + An instance of this class comes with the below attributes to interact with the different Argus endpoints: + + .. attribute:: metrics + + :class:`argusclient.client.MetricCollectionServiceClient` + + Interfaces with the Argus metrics collection endpoint. + + .. attribute:: annotations + + :class:`argusclient.client.AnnotationCollectionServiceClient` + + Interfaces with the Argus annotations collection endpoint. + + .. attribute:: dashboards + + :class:`argusclient.client.DashboardsServiceClient` + + Interfaces with the Argus dashboards endpoint. + + .. attribute:: permissions + + :class:`argusclient.client.PermissionsServiceClient` + + Interfaces with the Argus permissions endpoint. + + .. attribute:: users + + :class:`argusclient.client.UsersServiceClient` + + Interfaces with the Argus users endpoint. + + .. attribute:: namespaces + + :class:`argusclient.client.NamespacesServiceClient` + + Interfaces with the Argus namespaces endpoint. + + .. attribute:: alerts + + :class:`argusclient.client.AlertsServiceClient` + + Interfaces with the Argus alerts endpoint. + + """ + + def __init__(self, user, password, endpoint, timeout=(10, 120), refreshToken=None, accessToken=None): + """ + Creates a new client object to interface with the Argus RESTful API. + + :param user: The username for Argus account. + :type user: str + :param password: The password for Argus account. This is optional, unless a valid ``refreshToken`` or ``accessToken`` is specified. 
The password will be used to generate a ``refreshToken`` and ``accessToken``. + :type password: str + :param endpoint: The Argus endpoint to be used + :type endpoint: str + :param timeout: The timeout(s) to be applied for connection and read. This is passed as is to the calls to ``requests``. For more information, see `Requests Timeout `__ + :type timeout: int or float or tuple + :param refreshToken: A token that can be used to generate an ``accessToken`` as and when needed. When the ``refreshToken`` expires, the ``password`` (if specified) will be used to generate a new token. + :type refreshToken: str + :param accessToken: A token that can be used to authenticate with Argus. If a ``refreshToken`` or ``password`` is specified, the ``accessToken`` will be refreshed as and when it is needed. + :type refreshToken: str + """ + if not user: + raise ValueError("A valid user must be specified") + if not any((password, refreshToken, accessToken)): + raise ValueError("One of these parameters must be specified: (password, refreshToken, accessToken)") + if not endpoint: + raise ValueError("Need a valid Argus endpoint URL") + + self.user = user + self.password = password + self.endpoint = endpoint + self.timeout = timeout + self.refreshToken = refreshToken + self.accessToken = accessToken + + self.metrics = MetricCollectionServiceClient(self) + self.annotations = AnnotationCollectionServiceClient(self) + self.dashboards = DashboardsServiceClient(self) + self.permissions = PermissionsServiceClient(self) + self.users = UsersServiceClient(self) + self.namespaces = NamespacesServiceClient(self) + self.alerts = AlertsServiceClient(self) + self.conn = requests.Session() + + def login(self): + """ + Logs into the Argus service and establishes required tokens. + The call to ``login()`` is optional, as a session will be established the first time it is required. + + :return: the :class:`argusclient.model.User` object. + """ + # Simply make a request and let it handle the authentication implicitly. + return self._request("get", "users/username/{user}".format(user=self.user)) + + def logout(self): + """ + Logs out of the Argus service and destroys the session. + """ + # The new V2 auth doesn't support a logout, so just clear the tokens. + #self._request("get", "auth/logout") + self.refreshToken = self.accessToken = None + + @retry_auth + @auto_auth + def _request(self, method, path, params=None, dataObj=None, encCls=JsonEncoder, decCls=JsonDecoder): + """ + This is the low level method used to make the underlying Argus requests. This ensures that all requests are fully authenticated. + + :param method: The HTTP method name as a string. Some valid names are: `get`, `post`, `put` and `delete`. + :type method: str + :param path: The Argus path on which the request needs to be made, e.g. `/auth/login` + :type path: str + """ + return self._request_no_auth(method, path, params, dataObj, encCls, decCls) + + def _request_no_auth(self, method, path, params=None, dataObj=None, encCls=JsonEncoder, decCls=JsonDecoder): + """ + This is the low level method used to make the underlying Argus requests. It is preferable to use :meth:`_request` method instead. + + :param method: The HTTP method name as a string. Some valid names are: `get`, `post`, `put` and `delete`. + :type method: str + :param path: The Argus path on which the request needs to be made, e.g. 
`/auth/login` + :type path: str + """ + url = os.path.join(self.endpoint, path) + req_method = getattr(self.conn, method) + data = dataObj and json.dumps(dataObj, cls=encCls) or None + logging.debug("%s request with params: %s data length %s on: %s", method.upper(), params, data and len(data) or 0, url) # Mainly for the sake of data length + # Argus seems to recognized "Accept" header for "application/json" and "application/ms-excel", but the former is the default. + headers = {"Content-Type": "application/json"} + if self.accessToken: + headers["Authorization"] = "Bearer "+self.accessToken + resp = req_method(url, data=data, params=params, + headers=headers, + timeout=self.timeout) + res = check_success(resp, decCls) + return res + + +def check_success(resp, decCls): + if resp.status_code == httplib.OK: + # DELETE has no response. + if not resp.text: + return None + res = resp.json(cls=decCls) + if isinstance(res, dict) and "status" in res and res["status"] != 200: + raise ArgusException(resp.text) + return res + elif resp.status_code == httplib.NOT_FOUND: + raise ArgusObjectNotFoundException("Object not found at endpoint: {} message: {}".format(resp.url, resp.text)) + elif resp.status_code == httplib.UNAUTHORIZED: + raise ArgusAuthException("Failed to authenticate at endpoint: %s message: %s" % (resp.url, resp.text)) + else: + # TODO handle this differently, as this is typically a more severe exception (see W-2830904) + raise ArgusException(resp.text) diff --git a/venv/lib/python2.7/site-packages/argusclient/dashboardtags.py b/venv/lib/python2.7/site-packages/argusclient/dashboardtags.py new file mode 100644 index 0000000..1be7311 --- /dev/null +++ b/venv/lib/python2.7/site-packages/argusclient/dashboardtags.py @@ -0,0 +1,146 @@ +""" +This module provides functions for generating dashboard tags. These functions are simple wrappers on top of the lxml `ElementMaker` API. +These functions can be combined with additional HTML tags to produce the required XML to define dashboards. + + >>> import lxml.etree + >>> from argusclient.dashboardtags import E, DASHBOARD, CHART, TITLE, METRIC + >>> h1 = E.h1 + >>> hr = E.hr + >>> dashboard = DASHBOARD(h1("Test Dashboard"), hr(), CHART(TITLE("hdara.test"), METRIC("-1d:-0d:test.scope:test.metric:sum", name="hdara.test.metric"), name="Chart")) + >>> print lxml.etree.tostring(dashboard, pretty_print=True) + +

+    <ag-dashboard>
+      <h1>Test Dashboard</h1>
+      <hr/>
+      <ag-chart name="Chart">
+        <ag-option name="title.text" value="hdara.test"/>
+        <ag-metric name="hdara.test.metric">-1d:-0d:test.scope:test.metric:sum</ag-metric>
+      </ag-chart>
+    </ag-dashboard>
+
+    >>> print lxml.etree.tostring(dashboard, method="html")
+    <ag-dashboard><h1>Test Dashboard</h1><hr><ag-chart name="Chart"><ag-option name="title.text" value="hdara.test"></ag-option><ag-metric name="hdara.test.metric">-1d:-0d:test.scope:test.metric:sum</ag-metric></ag-chart></ag-dashboard>
+
+Argus can't handle auto-closed XML tags, so using the "html" `method` is recommended.
+"""
+
+#
+# Copyright (c) 2016, salesforce.com, inc.
+# All rights reserved.
+# Licensed under the BSD 3-Clause license.
+# For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
+#
+
+import lxml.builder
+
+#: Use this to create additional XML/HTML tags, e.g., `E.h1` will create the `<h1>
` tag +E = lxml.builder.ElementMaker() + +_DASHBOARD = getattr(E, "ag-dashboard") +_DATE = getattr(E, "ag-date") +_TEXT = getattr(E, "ag-text") +_SUBMIT = getattr(E, "ag-submit") +_CHART = getattr(E, "ag-chart") +_OPTION = getattr(E, "ag-option") +_METRIC = getattr(E, "ag-metric") +_FLAGS = getattr(E, "ag-flags") +_TABULAR = getattr(E, "ag-table") +_STATUS_INDICATOR = getattr(E, "ag-status-indicator") + + +def DASHBOARD(*args, **kwargs): + """ Generates an `ag-dashboard` tag. """ + return _DASHBOARD(*args, **kwargs) + + +def DATE(*args, **kwargs): + """ Generates an `ag-date` tag. """ + return _DATE(*args, **kwargs) + + +def TEXT(*args, **kwargs): + """ Generates an `ag-text` tag. """ + return _TEXT(*args, **kwargs) + + +def SUBMIT(*args, **kwargs): + """ Generates an `ag-submit` tag. """ + return _SUBMIT(*args, **kwargs) + + +def CHART(*args, **kwargs): + """ Generates an `ag-chart` tag. """ + return _CHART(*args, **kwargs) + + +def OPTION(*args, **kwargs): + """ Generates an `ag-option` tag. """ + return _OPTION(*args, **kwargs) + + +def METRIC(*args, **kwargs): + """ Generates an `ag-metric` tag. """ + return _METRIC(*args, **kwargs) + + +def FLAGS(*args, **kwargs): + """ Generates an `ag-flags` tag. """ + return _FLAGS(*args, **kwargs) + + +def TABULAR(*args, **kwargs): + """ Generates an `ag-table` tag. """ + return _TABULAR(*args, **kwargs) + + +def START_DATE(name="start", label="Start Date", default="-1d"): + """ Generates a `ag-date` tag with sensible defaults for `name`, `label` and `default` for specifying a start date. """ + return DATE(type="datetime", name=name, label=label, default=default) + + +def END_DATE(name="end", label="End Date", default="-0d"): + """ Generates a `ag-date` tag with sensible defaults for `name`, `label` and `default` for specifying end date. """ + return DATE(type="datetime", name=name, label=label, default=default) + + +def TEXT_BOX(name, label=None, default=None): + """ Generates a `ag-text` tag with sensible defaults for `type`, `name`, `label` and `default` for specifying text field. """ + return TEXT(type="text", name=name, label=label or name.capitalize(), default=default or "") + + +def TITLE(title): + """ Generates a `ag-option` tag with the specified `title`. """ + return OPTION(name="title.text", value=title) + + +def SUB_TITLE(subTitle): + """ Generates a `ag-option` tag with the specified `subtitle`. """ + return OPTION(name="subtitle.text", value=subTitle) + + +def YMIN(value): + """ Generates a `ag-option` tag with the specified yaxis.min value. """ + return OPTION(name="yaxis.min", value=value) + + +def YMAX(value): + """ Generates a `ag-option` tag with the specified yaxis.max value. """ + return OPTION(name="yaxis.max", value=value) + + +def XMIN(value): + """ Generates a `ag-option` tag with the specified xaxis.min value. """ + return OPTION(name="xaxis.min", value=value) + + +def XMAX(value): + """ Generates a `ag-option` tag with the specified xaxis.max value. """ + return OPTION(name="xaxis.max", value=value) + + +def AREA_CHART(*args, **kwargs): + """ Generates an `ag-chart` tag with `type='stackarea'`. 
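+
+    A usage sketch (names and the metric expression are illustrative):
+
+    >>> chart = AREA_CHART(TITLE("cpu"), METRIC("-1d:-0d:test.scope:cpu:sum", name="cpu"), name="Chart")  # doctest: +SKIP
+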
""" + return _CHART(type='stackarea', *args, **kwargs) + + +def STATUS_INDICATOR(*args, **kwargs): + """Generates an `ag-status-indicator` tag with the passed in `name`,`hi`,`low` and METRIC attributes""" + return _STATUS_INDICATOR(*args, **kwargs) diff --git a/venv/lib/python2.7/site-packages/argusclient/model.py b/venv/lib/python2.7/site-packages/argusclient/model.py new file mode 100644 index 0000000..82184fd --- /dev/null +++ b/venv/lib/python2.7/site-packages/argusclient/model.py @@ -0,0 +1,500 @@ +""" +Module containing the classes that model the Argus base objects. +""" + +# +# Copyright (c) 2016, salesforce.com, inc. +# All rights reserved. +# Licensed under the BSD 3-Clause license. +# For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause +# + +import json + +try: + basestring # Python 2 +except NameError: + basestring = str # Python 3 + + +class BaseEncodable(object): + + def __init__(self, **kwargs): + for k, v in kwargs.items(): + setattr(self, k, v) + + def to_dict(self): + D = dict((k, v) for k, v in self.__dict__.items() if not k.startswith("_")) + return D + + @classmethod + def from_dict(cls, D): + for f in cls.id_fields: + if isinstance(f, tuple): + if any(alias in D for alias in f): + continue + else: + return None + elif f not in D: + return None + else: + return cls(**D) + + @property + def argus_id(self): + """ + The property that gives access to the Argus ID. This is ``None`` for new objects. + """ + return hasattr(self, "id") and int(self.id) or None + + @argus_id.setter + def argus_id(self, value): + self.id = value + + @property + def owner_id(self): + """ + The ID of the object that owns this object or ``None``. Only applicable to a few types that are not first-class objects. + """ + return hasattr(self, "owner_id_field") and hasattr(self, self.owner_id_field) and int(getattr(self, self.owner_id_field)) or None + + def __str__(self): + return str(self.to_dict()) + + def __repr__(self): + return str(self) + + def __hash__(self): + return hash(self.__dict__) + + def __eq__(self, other): + if not isinstance(other, type(self)): + return False + return self.__dict__ == other.__dict__ + + +class AddListResult(BaseEncodable): + """ + Represents the result of metric or annotation collection add request. + + Ex: {"Error Messages":[],"Error":"0 metrics","Success":"1 metrics"} + """ + + id_fields = ("Error", "Success") + + def error_messages(self): + """ Return any error messsages from the result. """ + return self.__dict__["Error Messages"] + + def error_count(self): + """ Return error count from the result. """ + numEnd = self.Error.index(" ") + return int(self.Error[0:numEnd]) + + def success_count(self): + """ Return success count from the result. """ + numEnd = self.Success.index(" ") + return int(self.Success[0:numEnd]) + + +class User(BaseEncodable): + """ + Represents a User object in Argus. + + **Required parameters to the constructor:** + + :param userName: The username of the Argus user + :type userName: str + + **Optional parameters to the constructor:** + + :param email: The email address of the Argus user + :type email: str + """ + + id_fields = ("userName", "email") + + def __init__(self, userName, **kwargs): + super(User, self).__init__(userName=userName, **kwargs) + + +class Metric(BaseEncodable): + """ + Represents a Metric object in Argus. 
+
+
+class User(BaseEncodable):
+    """
+    Represents a User object in Argus.
+
+    **Required parameters to the constructor:**
+
+    :param userName: The username of the Argus user
+    :type userName: str
+
+    **Optional parameters to the constructor:**
+
+    :param email: The email address of the Argus user
+    :type email: str
+    """
+
+    id_fields = ("userName", "email")
+
+    def __init__(self, userName, **kwargs):
+        super(User, self).__init__(userName=userName, **kwargs)
+
+
+class Metric(BaseEncodable):
+    """
+    Represents a Metric object in Argus.
+
+    **Required parameters to the constructor:**
+
+    :param scope: The scope for the metric
+    :type scope: str
+    :param metric: The metric name
+    :type metric: str
+
+    **Optional parameters to the constructor:**
+
+    :param namespace: The namespace for the metric
+    :type namespace: str
+    :param displayName: The display name of the metric
+    :type displayName: str
+    :param unitType: The unit type of the metric value
+    :type unitType: str
+    :param datapoints: The actual metric data points as a dictionary of values with epoch timestamps as the keys.
+    :type datapoints: dict of int:object
+    :param tags: A dictionary of tags. Both keys and values should be valid strings.
+    :type tags: dict of str:str
+    """
+
+    id_fields = ("datapoints",)
+
+    def __init__(self, scope, metric, **kwargs):
+        super(Metric, self).__init__(scope=scope, metric=metric, **kwargs)
+        if not hasattr(self, "datapoints") or self.datapoints is None:
+            self.datapoints = {}
+        if not hasattr(self, "tags") or self.tags is None:
+            self.tags = {}
+
+    def __str__(self):
+        """
+        Return a string representation of the metric that can be directly used as the metric expression in a metric query and has the format:
+        ``scope:metric[{tagk=tagv,...}][:namespace]``
+        """
+        tags = hasattr(self, "tags") and self.tags or None
+        metricWithTags = tags and "%s{%s}" % (self.metric, ",".join("%s=%s" % (k, v) for k, v in self.tags.items())) or self.metric
+        return ":".join(str(q) for q in (self.scope, metricWithTags, hasattr(self, "namespace") and self.namespace or None) if q)
+
+
+class Annotation(BaseEncodable):
+    """
+    Represents an Annotation object in Argus.
+
+    **Required parameters to the constructor:**
+
+    :param source: The source of the annotation
+    :type source: str
+    :param scope: The scope for the annotation
+    :type scope: str
+    :param metric: The metric name of the annotation
+    :type metric: str
+    :param timestamp: The timestamp of the annotation
+    :type timestamp: int
+    :param id: An external id for the annotation
+    :type id: int
+
+    **Optional parameters to the constructor:**
+
+    :param tags: A dictionary of tags. Both keys and values should be valid strings.
+    :type tags: dict of str:str
+    :param fields: A dictionary of fields. Both keys and values should be valid strings.
+    :type fields: dict of str:str
+    """
+
+    id_fields = ("source", "timestamp",)
+
+    def __init__(self, source, scope, metric, id, timestamp, type, **kwargs):
+        super(Annotation, self).__init__(source=source, scope=scope, metric=metric, id=id, timestamp=timestamp, type=type, **kwargs)
+        if not hasattr(self, "fields") or self.fields is None:
+            self.fields = {}
+        if not hasattr(self, "tags") or self.tags is None:
+            self.tags = {}
+
+    def __str__(self):
+        """
+        Return a string representation of the annotation that can be directly used as the annotation expression in an annotation query and has the format:
+        ``scope:metric[{tagk=tagv,...}]:source``
+        """
+        tags = hasattr(self, "tags") and self.tags or None
+        metricWithTags = tags and "%s{%s}" % (self.metric, ",".join("%s=%s" % (k, v) for k, v in self.tags.items())) or self.metric
+        return ":".join(str(q) for q in (self.scope, metricWithTags, self.source) if q)
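As a concrete illustration of the ``__str__`` formats documented above (all values are made up):

    from argusclient.model import Metric, Annotation

    m = Metric("myscope", "mymetric", tags={"device": "host1"}, namespace="myns")
    str(m)   # -> 'myscope:mymetric{device=host1}:myns'

    a = Annotation("splunk", "myscope", "mymetric", id=42, timestamp=1471229000, type="erelease")
    str(a)   # -> 'myscope:mymetric:splunk'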
+
+
+class Dashboard(BaseEncodable):
+    """
+    Represents a Dashboard object in Argus.
+
+    The dashboard name has to be unique across the dashboards owned by the current user.
+
+    **Required parameters to the constructor:**
+
+    :param name: The name of the dashboard
+    :type name: str
+    :param content: The XML content
+    :type content: str
+
+    **Optional parameters to the constructor:**
+
+    :param description: A description for the dashboard
+    :type description: str
+    :param shared: The shared state of the dashboard.
+    :type shared: bool
+    :param id: The Argus id of the dashboard
+    :type id: int
+    """
+
+    id_fields = ("content",)
+
+    def __init__(self, name, content, **kwargs):
+        super(Dashboard, self).__init__(name=name, content=content, **kwargs)
+
+
+class Permission(BaseEncodable):
+    """
+    Represents a Permission object in Argus.
+
+    **Required parameters to the constructor:**
+
+    :param type: The type of permission - "user" or "group"
+    :type type: str
+    :param permissionNames: List of permissions that this user or group
+        has on the associated entity (whose id is put in the entityId field).
+        Permissions in this list are strings such as "VIEW", "EDIT", and "DELETE".
+    :type permissionNames: list of str
+
+    **Optional parameters to the constructor:**
+
+    :param groupId: Id of the group that has the associated permissions
+    :type groupId: str
+    :param username: Name of the user that has the associated permissions
+    :type username: str
+    :param permissionIds: List of permissions that this user or group
+        has on the associated entity (whose id is put in the entityId field).
+        Permissions in this list are integers such as 0, 1, and 2, which
+        correspond to "VIEW", "EDIT", and "DELETE" respectively.
+    :type permissionIds: list of int
+    :param entityId: Id of the associated entity
+    :type entityId: int
+    """
+
+    id_fields = ("permissionNames",)
+
+    def __init__(self, type, permissionNames, **kwargs):
+        super(Permission, self).__init__(type=type, permissionNames=permissionNames, **kwargs)
+
+
+class Namespace(BaseEncodable):
+    """
+    Represents a Namespace object in Argus.
+
+    **Required parameters to the constructor:**
+
+    :param qualifier: The namespace qualifier
+    :type qualifier: str
+
+    **Optional parameters to the constructor:**
+
+    :param usernames: The list of usernames that are authorized to post metrics to the namespace.
+    :type usernames: list of str
+    :param id: The Argus id of this namespace
+    :type id: int
+    """
+
+    id_fields = ("qualifier",)
+
+    def __init__(self, qualifier, **kwargs):
+        assert qualifier and isinstance(qualifier, basestring), "A string qualifier is required for namespace"
+        super(Namespace, self).__init__(qualifier=qualifier, **kwargs)
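Tying this back to the dashboardtags sketch shown earlier, a hedged example of pairing that XML with this class (the name and description are placeholders):

    from argusclient.model import Dashboard

    dash = Dashboard("my.dashboard", xml, description="Request volume overview", shared=True)
    # Because id_fields is ("content",), JsonDecoder resolves any server
    # response carrying a "content" key back into a Dashboard instance.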
+
+
+class Alert(BaseEncodable):
+    """
+    Represents an Alert object in Argus.
+
+    The alert name has to be unique across the alerts owned by the current user.
+
+    **Required parameters to the constructor:**
+
+    :param name: The name of the alert
+    :type name: str
+    :param expression: The metric query expression
+    :type expression: str
+    :param cronEntry: The cron expression
+    :type cronEntry: str
+
+    **Optional parameters to the constructor:**
+
+    :param enabled: The enabled state of the alert
+    :type enabled: bool
+    :param missingDataNotificationEnabled: The enabled state of missing data notification
+    :type missingDataNotificationEnabled: bool
+    :param triggerIds: The list of IDs for the triggers owned by this alert.
+    :type triggerIds: list of int
+    :param notificationIds: The list of IDs for the notifications owned by this alert.
+    :type notificationIds: list of int
+    :param shared: The shared state of the alert
+    :type shared: bool
+    """
+
+    id_fields = ("expression", "cronEntry",)
+
+    def __init__(self, name, expression, cronEntry, **kwargs):
+        self._triggers = None
+        self._notifications = None
+        super(Alert, self).__init__(name=name, expression=expression, cronEntry=cronEntry, **kwargs)
+
+    @property
+    def trigger(self):
+        """ A convenience property to be used when :attr:`triggers` contains a single :class:`argusclient.model.Trigger`. """
+        return self._triggers and len(self._triggers) == 1 and self._triggers[0] or None
+
+    @trigger.setter
+    def trigger(self, value):
+        if not isinstance(value, Trigger): raise ValueError("argument should be of Trigger type, but is: %s" % type(value))
+        if not ((value.owner_id is None and self.argus_id is None) or value.owner_id == self.argus_id): raise ValueError("trigger owned by alert id: %s not by %s" % (value.owner_id, self.argus_id))
+        self._triggers = [value]
+
+    @property
+    def triggers(self):
+        """ Property to get and set triggers on the alert. """
+        return self._triggers
+
+    @triggers.setter
+    def triggers(self, value):
+        if not isinstance(value, list): raise ValueError("value should be of list type, but is: %s" % type(value))
+        # This is a special case allowed only while adding new alerts, so ensure that argus_id of self and the objects is None.
+        # TODO Check for item type also
+        self._triggers = value
+
+    @property
+    def notification(self):
+        """ A convenience property to be used when :attr:`notifications` contains a single :class:`argusclient.model.Notification`. """
+        return self._notifications and len(self._notifications) == 1 and self._notifications[0] or None
+
+    @notification.setter
+    def notification(self, value):
+        if not isinstance(value, Notification): raise ValueError("value should be of Notification type, but is: %s" % type(value))
+        if not ((value.owner_id is None and self.argus_id is None) or value.owner_id == self.argus_id): raise ValueError("notification owned by alert id: %s not by %s" % (value.owner_id, self.argus_id))
+        self._notifications = [value]
+
+    @property
+    def notifications(self):
+        """ Property to get and set notifications on the alert. """
+        return self._notifications
+
+    @notifications.setter
+    def notifications(self, value):
+        if not isinstance(value, list): raise ValueError("value should be of list type, but is: %s" % type(value))
+        # This is a special case allowed only while adding new alerts, so ensure that argus_id of self and the objects is None.
+        # TODO Check for item type also
+        self._notifications = value
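A hedged sketch of composing a new alert with the convenience setters above; ``Trigger`` and ``Notification`` are defined just below, and the expression, cron entry and thresholds are invented:

    from argusclient.model import Alert, Trigger, Notification

    alert = Alert("cpu.alert", "-1h:myscope:cpu.usage:avg", "*/15 * * * *")
    alert.trigger = Trigger("high-cpu", Trigger.GREATER_THAN, threshold=90.0, inertia=600000)
    alert.notification = Notification("notify-oncall", Notification.EMAIL, subscriptions=["oncall@example.com"])
    # Both setters accept the new objects because neither the alert nor the
    # trigger/notification has an Argus id yet (argus_id and owner_id are None).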
+
+
+class Trigger(BaseEncodable):
+    """
+    Represents a Trigger object in Argus.
+
+    **Required parameters to the constructor:**
+
+    :param name: Name of the trigger
+    :type name: str
+    :param type: Type of the trigger. Must be one of these: :attr:`GREATER_THAN`, :attr:`GREATER_THAN_OR_EQ`, :attr:`LESS_THAN`, :attr:`LESS_THAN_OR_EQ`, :attr:`EQUAL`, :attr:`NOT_EQUAL`, :attr:`BETWEEN`, :attr:`NOT_BETWEEN`, :attr:`NO_DATA`.
+    :type type: str
+    :param threshold: Threshold for the trigger
+    :type threshold: float
+    :param inertia: Inertia for the trigger
+    :type inertia: int
+
+    **Optional parameters to the constructor:**
+
+    :param secondaryThreshold: Secondary threshold.
+    :type secondaryThreshold: float
+    :param notificationIds: List of IDs of notifications that this trigger is associated with.
+    :type notificationIds: list of int
+    :param alertId: ID of the alert that this trigger belongs to.
+    :type alertId: int
+    """
+
+    id_fields = ("threshold",)
+    owner_id_field = "alertId"
+
+    GREATER_THAN = "GREATER_THAN"
+    GREATER_THAN_OR_EQ = "GREATER_THAN_OR_EQ"
+    LESS_THAN = "LESS_THAN"
+    LESS_THAN_OR_EQ = "LESS_THAN_OR_EQ"
+    EQUAL = "EQUAL"
+    NOT_EQUAL = "NOT_EQUAL"
+    BETWEEN = "BETWEEN"
+    NOT_BETWEEN = "NOT_BETWEEN"
+    NO_DATA = "NO_DATA"
+
+    #: Set of all valid trigger types.
+    VALID_TYPES = frozenset((GREATER_THAN, GREATER_THAN_OR_EQ, LESS_THAN, LESS_THAN_OR_EQ, EQUAL, NOT_EQUAL, BETWEEN, NOT_BETWEEN, NO_DATA))
+
+    def __init__(self, name, type, threshold, inertia, **kwargs):
+        assert type in Trigger.VALID_TYPES, "type is not valid: %s" % type
+        super(Trigger, self).__init__(name=name, type=type, threshold=threshold, inertia=inertia, **kwargs)
+
+
+class Notification(BaseEncodable):
+    """
+    Represents a Notification object in Argus.
+
+    **Required parameters to the constructor:**
+
+    :param notifierName: The name of the notifier implementation. Must be one of :attr:`EMAIL`, :attr:`AUDIT`,
+        :attr:`GOC`, :attr:`GUS`, :attr:`CALLBACK`, :attr:`PAGER_DUTY`, :attr:`REFOCUS_BOOLEAN`,
+        :attr:`REFOCUS_VALUE`, :attr:`SLACK`, :attr:`ALERT_ROUTER`. It can also be supplied via the
+        ``notifier`` keyword.
+    :type notifierName: str
+    :param name: The name of the notification
+    :type name: str
+
+    **Optional parameters to the constructor:**
+
+    :param subscriptions: The subscriptions for the notifier implementation, such as email ids in case of :attr:`EMAIL`.
+    :type subscriptions: list of str
+    :param cooldownPeriod: The cooldown period
+    :type cooldownPeriod: float
+    :param cooldownExpiration: The cooldown expiration
+    :type cooldownExpiration: float
+    :param triggerIds: List of IDs of triggers that this notification is associated with.
+    :type triggerIds: list of int
+    :param alertId: ID of the alert that this notification belongs to.
+    :type alertId: int
+    """
+
+    id_fields = (("notifierName", "notifier"),)
+    owner_id_field = "alertId"
+
+    EMAIL = "com.salesforce.dva.argus.service.alert.notifier.EmailNotifier"
+    AUDIT = "com.salesforce.dva.argus.service.alert.notifier.AuditNotifier"
+    GOC = "com.salesforce.dva.argus.service.alert.notifier.GOCNotifier"
+    GUS = "com.salesforce.dva.argus.service.alert.notifier.GusNotifier"
+    CALLBACK = "com.salesforce.dva.argus.service.alert.notifier.CallbackNotifier"
+    PAGER_DUTY = "com.salesforce.dva.argus.service.alert.notifier.PagerDutyNotifier"
+    REFOCUS_BOOLEAN = "com.salesforce.dva.argus.service.alert.notifier.RefocusBooleanNotifier"
+    REFOCUS_VALUE = "com.salesforce.dva.argus.service.alert.notifier.RefocusValueNotifier"
+    SLACK = "com.salesforce.dva.argus.service.alert.notifier.SlackNotifier"
+    ALERT_ROUTER = "com.salesforce.dva.argus.service.alert.notifier.AlertRouterNotifier"
+
+    #: Set of all valid notifier implementation names.
+    VALID_NOTIFIERS = frozenset((EMAIL, AUDIT, GOC, GUS, CALLBACK, PAGER_DUTY,
+                                 REFOCUS_BOOLEAN, REFOCUS_VALUE, SLACK, ALERT_ROUTER))
+
+    def __init__(self, name, notifierName=None, metricsToAnnotate=None, **kwargs):
+        notifierName = notifierName or kwargs.get('notifier')
+        assert notifierName in Notification.VALID_NOTIFIERS, "notifierName is not valid: %s" % notifierName
+        super(Notification, self).__init__(name=name, notifierName=notifierName, metricsToAnnotate=metricsToAnnotate or [],
+                                           **kwargs)
+
+
+class JsonEncoder(json.JSONEncoder):
+    def default(self, obj):
+        return self.to_json(obj)
+
+    def to_json(self, obj):
+        if isinstance(obj, BaseEncodable):
+            return obj.to_dict()
+
+        return json.JSONEncoder.default(self, obj)
+
+
+class JsonDecoder(json.JSONDecoder):
+    def __init__(self, *args, **kwargs):
+        kwargs['object_hook'] = self.from_json
+        super(JsonDecoder, self).__init__(*args, **kwargs)
+
+    def from_json(self, jsonObj):
+        if not jsonObj or not isinstance(jsonObj, dict):
+            return jsonObj
+        for cls in (Metric, Dashboard, AddListResult, User, Namespace, Annotation, Alert, Trigger, Notification, Permission):
+            obj = cls.from_dict(jsonObj)
+            if obj:
+                return obj
+        else:
+            return jsonObj
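The two codec classes above round-trip model objects through JSON; a minimal sketch (the payload values are made up):

    import json
    from argusclient.model import Metric, JsonEncoder, JsonDecoder

    payload = json.dumps(Metric("myscope", "mymetric", datapoints={1471229000: 42}), cls=JsonEncoder)
    obj = json.loads(payload, cls=JsonDecoder)
    assert isinstance(obj, Metric) and obj.scope == "myscope"
    # A dict that matches no class's id_fields comes back unchanged as a plain dict.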
diff --git a/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/DESCRIPTION.rst b/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/DESCRIPTION.rst
new file mode 100644
index 0000000..08533b5
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/DESCRIPTION.rst
@@ -0,0 +1,3 @@
+Docs for this project are maintained at https://github.com/wbond/asn1crypto#readme.
+
+
diff --git a/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/INSTALLER b/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/METADATA b/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/METADATA
new file mode 100644
index 0000000..943af2b
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/METADATA
@@ -0,0 +1,27 @@
+Metadata-Version: 2.0
+Name: asn1crypto
+Version: 0.24.0
+Summary: Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP
+Home-page: https://github.com/wbond/asn1crypto
+Author: wbond
+Author-email: will@wbond.net
+License: MIT
+Description-Content-Type: UNKNOWN
+Keywords: asn1 crypto pki x509 certificate rsa dsa ec dh
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Security :: Cryptography
+
+Docs for this project are maintained at https://github.com/wbond/asn1crypto#readme.
+ + diff --git a/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/RECORD b/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/RECORD new file mode 100644 index 0000000..a94d31a --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/RECORD @@ -0,0 +1,62 @@ +asn1crypto-0.24.0.dist-info/DESCRIPTION.rst,sha256=AKxcPr8A1r7Lgepi1zxda_bylQUpelEwT9VouCPpXFA,86 +asn1crypto-0.24.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +asn1crypto-0.24.0.dist-info/METADATA,sha256=k2awXhDUj-10m5VIjPmW4_v1_g8IxusRxHuHmSikb-c,1132 +asn1crypto-0.24.0.dist-info/RECORD,, +asn1crypto-0.24.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +asn1crypto-0.24.0.dist-info/WHEEL,sha256=GrqQvamwgBV4nLoJe0vhYRSWzWsx7xjlt74FT0SWYfE,110 +asn1crypto-0.24.0.dist-info/metadata.json,sha256=GWUZOttbqBGOXofIFkjNpPeItn-MlCQZLATLKFjoHL4,1177 +asn1crypto-0.24.0.dist-info/top_level.txt,sha256=z8-jF_Q-jgzGox7T2XYian3-yeptLS2I7MjoJLBaq1Y,11 +asn1crypto/__init__.py,sha256=d-HnaY-IU0N1aepUI9bYR11J2EGccGAN8kDvKMgGu78,209 +asn1crypto/__init__.pyc,, +asn1crypto/_elliptic_curve.py,sha256=vdORN6l6XZyCQpA-9tdk71IrGLciAHEimOc5KsG7YNw,9419 +asn1crypto/_elliptic_curve.pyc,, +asn1crypto/_errors.py,sha256=YCCXkifzLkCnxo-iIO-oIWY-UuPZNbEai7z94ps8ck4,967 +asn1crypto/_errors.pyc,, +asn1crypto/_ffi.py,sha256=Y4IhTLrLTxt7Xg5vcQFLAvH_Mq8xg3LTcmkIJn-G8eE,738 +asn1crypto/_ffi.pyc,, +asn1crypto/_inet.py,sha256=z2K8CKxSfEQ3d0UMIIgXw5NugKLZyRS__Cn10ivbiGM,4661 +asn1crypto/_inet.pyc,, +asn1crypto/_int.py,sha256=I-INe1rHGsQfV6vndtD53fhyjsEOG5a6WoDY4w6M2B4,4618 +asn1crypto/_int.pyc,, +asn1crypto/_iri.py,sha256=5gG6VBG-tYAQT6lWnjvHRH-RvIStaeG_PxhyPLnL-KQ,8628 +asn1crypto/_iri.pyc,, +asn1crypto/_ordereddict.py,sha256=5VAYLbxxEtfdZGKgjzINxlmNkYg9d_7JmZAFfr0EcAk,4533 +asn1crypto/_ordereddict.pyc,, +asn1crypto/_perf/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +asn1crypto/_perf/__init__.pyc,, +asn1crypto/_perf/_big_num_ctypes.py,sha256=bc1WXhoR08nB-l0mOutKldupcjybswWckOxquiXByx8,2027 +asn1crypto/_perf/_big_num_ctypes.pyc,, +asn1crypto/_teletex_codec.py,sha256=LhDpkTprPaoc7UJ9i9uwnP-5Am00SVbHSQizPpCLpqE,5053 +asn1crypto/_teletex_codec.pyc,, +asn1crypto/_types.py,sha256=OwsX30epv-ETI9eGrLW9GLqv0KeoiSRnJcjN92roqhQ,939 +asn1crypto/_types.pyc,, +asn1crypto/algos.py,sha256=zcJgP8VnZTT60ijR5-RPuhI9v4kcxrWgzv7_EjlZfW0,34095 +asn1crypto/algos.pyc,, +asn1crypto/cms.py,sha256=BOWS3bT33ZfmMpdL_yogG-3PwI2fbWEr1F7aw_5PFZo,25118 +asn1crypto/cms.pyc,, +asn1crypto/core.py,sha256=IDrq3cOH6xxc64nQcHr9LUdKvr2DU4ehd49I6k5XOcM,157260 +asn1crypto/core.pyc,, +asn1crypto/crl.py,sha256=KJLuEn1IDJO19X4eLYhRyaM-ACnxKkimoGsyAnzGdgA,16104 +asn1crypto/crl.pyc,, +asn1crypto/csr.py,sha256=CP0tOGyHPEV3ZpnB7L4L9X-X1epD6TVixal6UAtTvLQ,2142 +asn1crypto/csr.pyc,, +asn1crypto/keys.py,sha256=fH9hBBq2buS378sKS8OYFHpRSQSOU2qCSnxqMeeGBHk,35185 +asn1crypto/keys.pyc,, +asn1crypto/ocsp.py,sha256=jF_F5-xwcATqxs-2Qpo9V83lhfDNYuZZTd4ODpKnyAM,17792 +asn1crypto/ocsp.pyc,, +asn1crypto/parser.py,sha256=pkyVS-BJk4mDTve0mZMUmrrUV_IHnQk9AvgtkEvfEKM,9149 +asn1crypto/parser.pyc,, +asn1crypto/pdf.py,sha256=HNybnna5WG2ftmb8Nx_T5reyLJ0E7hJXoj37DuLbpX0,2250 +asn1crypto/pdf.pyc,, +asn1crypto/pem.py,sha256=s46r_KCQ9h1HENXMh4AGKTXesivQrKnWzU3-gok75uI,6145 +asn1crypto/pem.pyc,, +asn1crypto/pkcs12.py,sha256=q-KGfvaO72B8AfvolwsqhAQpjuqnkEczPddXYLBFUSE,4566 +asn1crypto/pkcs12.pyc,, +asn1crypto/tsp.py,sha256=jpjpFmWBwX4GUVrYu9Gnk6YXRnzb-uVFvfaJSo-m_2Q,7827 +asn1crypto/tsp.pyc,, 
+asn1crypto/util.py,sha256=m3dc7XtmQiq__uC0G2jcyHYkaJPsnm7KGAWiCXj-xio,18043 +asn1crypto/util.pyc,, +asn1crypto/version.py,sha256=SqGlEZKUpqLOswcILajD1bITTOejwN3Jvlnw325hHjQ,154 +asn1crypto/version.pyc,, +asn1crypto/x509.py,sha256=IGKOKTX3GWIsZhTtcpbcDkFWjpoTozNPlItYZ8pY8QA,92305 +asn1crypto/x509.pyc,, diff --git a/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/REQUESTED b/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/WHEEL b/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/WHEEL new file mode 100644 index 0000000..0de529b --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.26.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/metadata.json b/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/metadata.json new file mode 100644 index 0000000..caa3c3a --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"generator": "bdist_wheel (0.26.0)", "summary": "Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP", "classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Security :: Cryptography"], "description_content_type": "UNKNOWN", "extensions": {"python.details": {"project_urls": {"Home": "https://github.com/wbond/asn1crypto"}, "contacts": [{"email": "will@wbond.net", "name": "wbond", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}}}, "keywords": ["asn1", "crypto", "pki", "x509", "certificate", "rsa", "dsa", "ec", "dh"], "license": "MIT", "metadata_version": "2.0", "name": "asn1crypto", "version": "0.24.0"} \ No newline at end of file diff --git a/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/top_level.txt b/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/top_level.txt new file mode 100644 index 0000000..35a704e --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto-0.24.0.dist-info/top_level.txt @@ -0,0 +1 @@ +asn1crypto diff --git a/venv/lib/python2.7/site-packages/asn1crypto/__init__.py b/venv/lib/python2.7/site-packages/asn1crypto/__init__.py new file mode 100644 index 0000000..afdeb43 --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/__init__.py @@ -0,0 +1,9 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +from .version import __version__, __version_info__ + +__all__ = [ + '__version__', + '__version_info__', +] diff --git a/venv/lib/python2.7/site-packages/asn1crypto/_elliptic_curve.py b/venv/lib/python2.7/site-packages/asn1crypto/_elliptic_curve.py new file mode 100644 index 0000000..8c0f12d --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/_elliptic_curve.py @@ -0,0 +1,314 @@ +# coding: utf-8 + 
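For readers skimming this vendored module, a small usage sketch against the NIST P-256 constants defined at the end of the file (this is a private module of asn1crypto, so the example is illustrative only):

    from asn1crypto._elliptic_curve import SECP256R1_CURVE, SECP256R1_BASE_POINT, INFINITY

    point = 12345 * SECP256R1_BASE_POINT        # scalar multiplication via __rmul__
    assert SECP256R1_CURVE.contains(point)      # multiples of the base point stay on the curve
    assert SECP256R1_BASE_POINT * SECP256R1_BASE_POINT.order == INFINITY  # order wraps to infinity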
+""" +Classes and objects to represent prime-field elliptic curves and points on them. +Exports the following items: + + - PrimeCurve() + - PrimePoint() + - SECP192R1_CURVE + - SECP192R1_BASE_POINT + - SECP224R1_CURVE + - SECP224R1_BASE_POINT + - SECP256R1_CURVE + - SECP256R1_BASE_POINT + - SECP384R1_CURVE + - SECP384R1_BASE_POINT + - SECP521R1_CURVE + - SECP521R1_BASE_POINT + +The curve constants are all PrimeCurve() objects and the base point constants +are all PrimePoint() objects. + +Some of the following source code is derived from +http://webpages.charter.net/curryfans/peter/downloads.html, but has been heavily +modified to fit into this projects lint settings. The original project license +is listed below: + +Copyright (c) 2014 Peter Pearson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +from ._int import inverse_mod + + +class PrimeCurve(): + """ + Elliptic curve over a prime field. Characteristic two field curves are not + supported. 
+ """ + + def __init__(self, p, a, b): + """ + The curve of points satisfying y^2 = x^3 + a*x + b (mod p) + + :param p: + The prime number as an integer + + :param a: + The component a as an integer + + :param b: + The component b as an integer + """ + + self.p = p + self.a = a + self.b = b + + def contains(self, point): + """ + :param point: + A Point object + + :return: + Boolean if the point is on this curve + """ + + y2 = point.y * point.y + x3 = point.x * point.x * point.x + return (y2 - (x3 + self.a * point.x + self.b)) % self.p == 0 + + +class PrimePoint(): + """ + A point on a prime-field elliptic curve + """ + + def __init__(self, curve, x, y, order=None): + """ + :param curve: + A PrimeCurve object + + :param x: + The x coordinate of the point as an integer + + :param y: + The y coordinate of the point as an integer + + :param order: + The order of the point, as an integer - optional + """ + + self.curve = curve + self.x = x + self.y = y + self.order = order + + # self.curve is allowed to be None only for INFINITY: + if self.curve: + if not self.curve.contains(self): + raise ValueError('Invalid EC point') + + if self.order: + if self * self.order != INFINITY: + raise ValueError('Invalid EC point') + + def __cmp__(self, other): + """ + :param other: + A PrimePoint object + + :return: + 0 if identical, 1 otherwise + """ + if self.curve == other.curve and self.x == other.x and self.y == other.y: + return 0 + else: + return 1 + + def __add__(self, other): + """ + :param other: + A PrimePoint object + + :return: + A PrimePoint object + """ + + # X9.62 B.3: + + if other == INFINITY: + return self + if self == INFINITY: + return other + assert self.curve == other.curve + if self.x == other.x: + if (self.y + other.y) % self.curve.p == 0: + return INFINITY + else: + return self.double() + + p = self.curve.p + + l_ = ((other.y - self.y) * inverse_mod(other.x - self.x, p)) % p + + x3 = (l_ * l_ - self.x - other.x) % p + y3 = (l_ * (self.x - x3) - self.y) % p + + return PrimePoint(self.curve, x3, y3) + + def __mul__(self, other): + """ + :param other: + An integer to multiple the Point by + + :return: + A PrimePoint object + """ + + def leftmost_bit(x): + assert x > 0 + result = 1 + while result <= x: + result = 2 * result + return result // 2 + + e = other + if self.order: + e = e % self.order + if e == 0: + return INFINITY + if self == INFINITY: + return INFINITY + assert e > 0 + + # From X9.62 D.3.2: + + e3 = 3 * e + negative_self = PrimePoint(self.curve, self.x, -self.y, self.order) + i = leftmost_bit(e3) // 2 + result = self + # print "Multiplying %s by %d (e3 = %d):" % ( self, other, e3 ) + while i > 1: + result = result.double() + if (e3 & i) != 0 and (e & i) == 0: + result = result + self + if (e3 & i) == 0 and (e & i) != 0: + result = result + negative_self + # print ". . . 
i = %d, result = %s" % ( i, result ) + i = i // 2 + + return result + + def __rmul__(self, other): + """ + :param other: + An integer to multiple the Point by + + :return: + A PrimePoint object + """ + + return self * other + + def double(self): + """ + :return: + A PrimePoint object that is twice this point + """ + + # X9.62 B.3: + + p = self.curve.p + a = self.curve.a + + l_ = ((3 * self.x * self.x + a) * inverse_mod(2 * self.y, p)) % p + + x3 = (l_ * l_ - 2 * self.x) % p + y3 = (l_ * (self.x - x3) - self.y) % p + + return PrimePoint(self.curve, x3, y3) + + +# This one point is the Point At Infinity for all purposes: +INFINITY = PrimePoint(None, None, None) + + +# NIST Curve P-192: +SECP192R1_CURVE = PrimeCurve( + 6277101735386680763835789423207666416083908700390324961279, + -3, + 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1 +) +SECP192R1_BASE_POINT = PrimePoint( + SECP192R1_CURVE, + 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012, + 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811, + 6277101735386680763835789423176059013767194773182842284081 +) + + +# NIST Curve P-224: +SECP224R1_CURVE = PrimeCurve( + 26959946667150639794667015087019630673557916260026308143510066298881, + -3, + 0xb4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4 +) +SECP224R1_BASE_POINT = PrimePoint( + SECP224R1_CURVE, + 0xb70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21, + 0xbd376388b5f723fb4c22dfe6cd4375a05a07476444d5819985007e34, + 26959946667150639794667015087019625940457807714424391721682722368061 +) + + +# NIST Curve P-256: +SECP256R1_CURVE = PrimeCurve( + 115792089210356248762697446949407573530086143415290314195533631308867097853951, + -3, + 0x5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604b +) +SECP256R1_BASE_POINT = PrimePoint( + SECP256R1_CURVE, + 0x6b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296, + 0x4fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5, + 115792089210356248762697446949407573529996955224135760342422259061068512044369 +) + + +# NIST Curve P-384: +SECP384R1_CURVE = PrimeCurve( + 39402006196394479212279040100143613805079739270465446667948293404245721771496870329047266088258938001861606973112319, # noqa + -3, + 0xb3312fa7e23ee7e4988e056be3f82d19181d9c6efe8141120314088f5013875ac656398d8a2ed19d2a85c8edd3ec2aef +) +SECP384R1_BASE_POINT = PrimePoint( + SECP384R1_CURVE, + 0xaa87ca22be8b05378eb1c71ef320ad746e1d3b628ba79b9859f741e082542a385502f25dbf55296c3a545e3872760ab7, + 0x3617de4a96262c6f5d9e98bf9292dc29f8f41dbd289a147ce9da3113b5f0b8c00a60b1ce1d7e819d7a431d7c90ea0e5f, + 39402006196394479212279040100143613805079739270465446667946905279627659399113263569398956308152294913554433653942643 +) + + +# NIST Curve P-521: +SECP521R1_CURVE = PrimeCurve( + 6864797660130609714981900799081393217269435300143305409394463459185543183397656052122559640661454554977296311391480858037121987999716643812574028291115057151, # noqa + -3, + 0x051953eb9618e1c9a1f929a21a0b68540eea2da725b99b315f3b8b489918ef109e156193951ec7e937b1652c0bd3bb1bf073573df883d2c34f1ef451fd46b503f00 # noqa +) +SECP521R1_BASE_POINT = PrimePoint( + SECP521R1_CURVE, + 0xc6858e06b70404e9cd9e3ecb662395b4429c648139053fb521f828af606b4d3dbaa14b5e77efe75928fe1dc127a2ffa8de3348b3c1856a429bf97e7e31c2e5bd66, # noqa + 0x11839296a789a3bc0045c8a5fb42c7d1bd998f54449579b446817afbd17273e662c97ee72995ef42640c550b9013fad0761353c7086a272c24088be94769fd16650, # noqa + 
6864797660130609714981900799081393217269435300143305409394463459185543183397655394245057746333217197532963996371363321113864768612440380340372808892707005449 # noqa +) diff --git a/venv/lib/python2.7/site-packages/asn1crypto/_errors.py b/venv/lib/python2.7/site-packages/asn1crypto/_errors.py new file mode 100644 index 0000000..cc785a5 --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/_errors.py @@ -0,0 +1,45 @@ +# coding: utf-8 + +""" +Helper for formatting exception messages. Exports the following items: + + - unwrap() +""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +import re +import textwrap + + +def unwrap(string, *params): + """ + Takes a multi-line string and does the following: + + - dedents + - converts newlines with text before and after into a single line + - strips leading and trailing whitespace + + :param string: + The string to format + + :param *params: + Params to interpolate into the string + + :return: + The formatted string + """ + + output = textwrap.dedent(string) + + # Unwrap lines, taking into account bulleted lists, ordered lists and + # underlines consisting of = signs + if output.find('\n') != -1: + output = re.sub('(?<=\\S)\n(?=[^ \n\t\\d\\*\\-=])', ' ', output) + + if params: + output = output % params + + output = output.strip() + + return output diff --git a/venv/lib/python2.7/site-packages/asn1crypto/_ffi.py b/venv/lib/python2.7/site-packages/asn1crypto/_ffi.py new file mode 100644 index 0000000..2a4f5bf --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/_ffi.py @@ -0,0 +1,45 @@ +# coding: utf-8 + +""" +FFI helper compatibility functions. Exports the following items: + + - LibraryNotFoundError + - FFIEngineError + - bytes_from_buffer() + - buffer_from_bytes() + - null() +""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +from ctypes import create_string_buffer + + +def buffer_from_bytes(initializer): + return create_string_buffer(initializer) + + +def bytes_from_buffer(buffer, maxlen=None): + return buffer.raw + + +def null(): + return None + + +class LibraryNotFoundError(Exception): + + """ + An exception when trying to find a shared library + """ + + pass + + +class FFIEngineError(Exception): + + """ + An exception when trying to instantiate ctypes or cffi + """ + + pass diff --git a/venv/lib/python2.7/site-packages/asn1crypto/_inet.py b/venv/lib/python2.7/site-packages/asn1crypto/_inet.py new file mode 100644 index 0000000..045ba56 --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/_inet.py @@ -0,0 +1,170 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +import socket +import struct + +from ._errors import unwrap +from ._types import byte_cls, bytes_to_list, str_cls, type_name + + +def inet_ntop(address_family, packed_ip): + """ + Windows compatibility shim for socket.inet_ntop(). 
+ + :param address_family: + socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6 + + :param packed_ip: + A byte string of the network form of an IP address + + :return: + A unicode string of the IP address + """ + + if address_family not in set([socket.AF_INET, socket.AF_INET6]): + raise ValueError(unwrap( + ''' + address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s), + not %s + ''', + repr(socket.AF_INET), + repr(socket.AF_INET6), + repr(address_family) + )) + + if not isinstance(packed_ip, byte_cls): + raise TypeError(unwrap( + ''' + packed_ip must be a byte string, not %s + ''', + type_name(packed_ip) + )) + + required_len = 4 if address_family == socket.AF_INET else 16 + if len(packed_ip) != required_len: + raise ValueError(unwrap( + ''' + packed_ip must be %d bytes long - is %d + ''', + required_len, + len(packed_ip) + )) + + if address_family == socket.AF_INET: + return '%d.%d.%d.%d' % tuple(bytes_to_list(packed_ip)) + + octets = struct.unpack(b'!HHHHHHHH', packed_ip) + + runs_of_zero = {} + longest_run = 0 + zero_index = None + for i, octet in enumerate(octets + (-1,)): + if octet != 0: + if zero_index is not None: + length = i - zero_index + if length not in runs_of_zero: + runs_of_zero[length] = zero_index + longest_run = max(longest_run, length) + zero_index = None + elif zero_index is None: + zero_index = i + + hexed = [hex(o)[2:] for o in octets] + + if longest_run < 2: + return ':'.join(hexed) + + zero_start = runs_of_zero[longest_run] + zero_end = zero_start + longest_run + + return ':'.join(hexed[:zero_start]) + '::' + ':'.join(hexed[zero_end:]) + + +def inet_pton(address_family, ip_string): + """ + Windows compatibility shim for socket.inet_ntop(). + + :param address_family: + socket.AF_INET for IPv4 or socket.AF_INET6 for IPv6 + + :param ip_string: + A unicode string of an IP address + + :return: + A byte string of the network form of the IP address + """ + + if address_family not in set([socket.AF_INET, socket.AF_INET6]): + raise ValueError(unwrap( + ''' + address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s), + not %s + ''', + repr(socket.AF_INET), + repr(socket.AF_INET6), + repr(address_family) + )) + + if not isinstance(ip_string, str_cls): + raise TypeError(unwrap( + ''' + ip_string must be a unicode string, not %s + ''', + type_name(ip_string) + )) + + if address_family == socket.AF_INET: + octets = ip_string.split('.') + error = len(octets) != 4 + if not error: + ints = [] + for o in octets: + o = int(o) + if o > 255 or o < 0: + error = True + break + ints.append(o) + + if error: + raise ValueError(unwrap( + ''' + ip_string must be a dotted string with four integers in the + range of 0 to 255, got %s + ''', + repr(ip_string) + )) + + return struct.pack(b'!BBBB', *ints) + + error = False + omitted = ip_string.count('::') + if omitted > 1: + error = True + elif omitted == 0: + octets = ip_string.split(':') + error = len(octets) != 8 + else: + begin, end = ip_string.split('::') + begin_octets = begin.split(':') + end_octets = end.split(':') + missing = 8 - len(begin_octets) - len(end_octets) + octets = begin_octets + (['0'] * missing) + end_octets + + if not error: + ints = [] + for o in octets: + o = int(o, 16) + if o > 65535 or o < 0: + error = True + break + ints.append(o) + + return struct.pack(b'!HHHHHHHH', *ints) + + raise ValueError(unwrap( + ''' + ip_string must be a valid ipv6 string, got %s + ''', + repr(ip_string) + )) diff --git a/venv/lib/python2.7/site-packages/asn1crypto/_int.py 
b/venv/lib/python2.7/site-packages/asn1crypto/_int.py new file mode 100644 index 0000000..d0c2319 --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/_int.py @@ -0,0 +1,159 @@ +# coding: utf-8 + +""" +Function for calculating the modular inverse. Exports the following items: + + - inverse_mod() + +Source code is derived from +http://webpages.charter.net/curryfans/peter/downloads.html, but has been heavily +modified to fit into this projects lint settings. The original project license +is listed below: + +Copyright (c) 2014 Peter Pearson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +import math +import platform + +from .util import int_to_bytes, int_from_bytes + +# First try to use ctypes with OpenSSL for better performance +try: + from ._ffi import ( + buffer_from_bytes, + bytes_from_buffer, + FFIEngineError, + LibraryNotFoundError, + null, + ) + + # Some versions of PyPy have segfault issues, so we just punt on PyPy + if platform.python_implementation() == 'PyPy': + raise EnvironmentError() + + try: + from ._perf._big_num_ctypes import libcrypto + + def inverse_mod(a, p): + """ + Compute the modular inverse of a (mod p) + + :param a: + An integer + + :param p: + An integer + + :return: + An integer + """ + + ctx = libcrypto.BN_CTX_new() + + a_bytes = int_to_bytes(abs(a)) + p_bytes = int_to_bytes(abs(p)) + + a_buf = buffer_from_bytes(a_bytes) + a_bn = libcrypto.BN_bin2bn(a_buf, len(a_bytes), null()) + if a < 0: + libcrypto.BN_set_negative(a_bn, 1) + + p_buf = buffer_from_bytes(p_bytes) + p_bn = libcrypto.BN_bin2bn(p_buf, len(p_bytes), null()) + if p < 0: + libcrypto.BN_set_negative(p_bn, 1) + + r_bn = libcrypto.BN_mod_inverse(null(), a_bn, p_bn, ctx) + r_len_bits = libcrypto.BN_num_bits(r_bn) + r_len = int(math.ceil(r_len_bits / 8)) + r_buf = buffer_from_bytes(r_len) + libcrypto.BN_bn2bin(r_bn, r_buf) + r_bytes = bytes_from_buffer(r_buf, r_len) + result = int_from_bytes(r_bytes) + + libcrypto.BN_free(a_bn) + libcrypto.BN_free(p_bn) + libcrypto.BN_free(r_bn) + libcrypto.BN_CTX_free(ctx) + + return result + except (LibraryNotFoundError, FFIEngineError): + raise EnvironmentError() + +# If there was an issue using ctypes or OpenSSL, we fall back to pure python +except (EnvironmentError, ImportError): + + def inverse_mod(a, p): + """ + Compute the modular inverse of a (mod p) + + :param a: + An integer + + :param p: + An integer + + :return: + An integer + """ + + if a < 0 or p <= a: + a = a % p + + # From 
Ferguson and Schneier, roughly: + + c, d = a, p + uc, vc, ud, vd = 1, 0, 0, 1 + while c != 0: + q, c, d = divmod(d, c) + (c,) + uc, vc, ud, vd = ud - q * uc, vd - q * vc, uc, vc + + # At this point, d is the GCD, and ud*a+vd*p = d. + # If d == 1, this means that ud is a inverse. + + assert d == 1 + if ud > 0: + return ud + else: + return ud + p + + +def fill_width(bytes_, width): + """ + Ensure a byte string representing a positive integer is a specific width + (in bytes) + + :param bytes_: + The integer byte string + + :param width: + The desired width as an integer + + :return: + A byte string of the width specified + """ + + while len(bytes_) < width: + bytes_ = b'\x00' + bytes_ + return bytes_ diff --git a/venv/lib/python2.7/site-packages/asn1crypto/_iri.py b/venv/lib/python2.7/site-packages/asn1crypto/_iri.py new file mode 100644 index 0000000..57ddd40 --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/_iri.py @@ -0,0 +1,288 @@ +# coding: utf-8 + +""" +Functions to convert unicode IRIs into ASCII byte string URIs and back. Exports +the following items: + + - iri_to_uri() + - uri_to_iri() +""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +from encodings import idna # noqa +import codecs +import re +import sys + +from ._errors import unwrap +from ._types import byte_cls, str_cls, type_name, bytes_to_list, int_types + +if sys.version_info < (3,): + from urlparse import urlsplit, urlunsplit + from urllib import ( + quote as urlquote, + unquote as unquote_to_bytes, + ) + +else: + from urllib.parse import ( + quote as urlquote, + unquote_to_bytes, + urlsplit, + urlunsplit, + ) + + +def iri_to_uri(value): + """ + Normalizes and encodes a unicode IRI into an ASCII byte string URI + + :param value: + A unicode string of an IRI + + :return: + A byte string of the ASCII-encoded URI + """ + + if not isinstance(value, str_cls): + raise TypeError(unwrap( + ''' + value must be a unicode string, not %s + ''', + type_name(value) + )) + + scheme = None + # Python 2.6 doesn't split properly is the URL doesn't start with http:// or https:// + if sys.version_info < (2, 7) and not value.startswith('http://') and not value.startswith('https://'): + real_prefix = None + prefix_match = re.match('^[^:]*://', value) + if prefix_match: + real_prefix = prefix_match.group(0) + value = 'http://' + value[len(real_prefix):] + parsed = urlsplit(value) + if real_prefix: + value = real_prefix + value[7:] + scheme = _urlquote(real_prefix[:-3]) + else: + parsed = urlsplit(value) + + if scheme is None: + scheme = _urlquote(parsed.scheme) + hostname = parsed.hostname + if hostname is not None: + hostname = hostname.encode('idna') + # RFC 3986 allows userinfo to contain sub-delims + username = _urlquote(parsed.username, safe='!$&\'()*+,;=') + password = _urlquote(parsed.password, safe='!$&\'()*+,;=') + port = parsed.port + if port is not None: + port = str_cls(port).encode('ascii') + + netloc = b'' + if username is not None: + netloc += username + if password: + netloc += b':' + password + netloc += b'@' + if hostname is not None: + netloc += hostname + if port is not None: + default_http = scheme == b'http' and port == b'80' + default_https = scheme == b'https' and port == b'443' + if not default_http and not default_https: + netloc += b':' + port + + # RFC 3986 allows a path to contain sub-delims, plus "@" and ":" + path = _urlquote(parsed.path, safe='/!$&\'()*+,;=@:') + # RFC 3986 allows the query to contain sub-delims, plus "@", ":" , "/" and "?" 
+ query = _urlquote(parsed.query, safe='/?!$&\'()*+,;=@:') + # RFC 3986 allows the fragment to contain sub-delims, plus "@", ":" , "/" and "?" + fragment = _urlquote(parsed.fragment, safe='/?!$&\'()*+,;=@:') + + if query is None and fragment is None and path == b'/': + path = None + + # Python 2.7 compat + if path is None: + path = '' + + output = urlunsplit((scheme, netloc, path, query, fragment)) + if isinstance(output, str_cls): + output = output.encode('latin1') + return output + + +def uri_to_iri(value): + """ + Converts an ASCII URI byte string into a unicode IRI + + :param value: + An ASCII-encoded byte string of the URI + + :return: + A unicode string of the IRI + """ + + if not isinstance(value, byte_cls): + raise TypeError(unwrap( + ''' + value must be a byte string, not %s + ''', + type_name(value) + )) + + parsed = urlsplit(value) + + scheme = parsed.scheme + if scheme is not None: + scheme = scheme.decode('ascii') + + username = _urlunquote(parsed.username, remap=[':', '@']) + password = _urlunquote(parsed.password, remap=[':', '@']) + hostname = parsed.hostname + if hostname: + hostname = hostname.decode('idna') + port = parsed.port + if port and not isinstance(port, int_types): + port = port.decode('ascii') + + netloc = '' + if username is not None: + netloc += username + if password: + netloc += ':' + password + netloc += '@' + if hostname is not None: + netloc += hostname + if port is not None: + netloc += ':' + str_cls(port) + + path = _urlunquote(parsed.path, remap=['/'], preserve=True) + query = _urlunquote(parsed.query, remap=['&', '='], preserve=True) + fragment = _urlunquote(parsed.fragment) + + return urlunsplit((scheme, netloc, path, query, fragment)) + + +def _iri_utf8_errors_handler(exc): + """ + Error handler for decoding UTF-8 parts of a URI into an IRI. Leaves byte + sequences encoded in %XX format, but as part of a unicode string. + + :param exc: + The UnicodeDecodeError exception + + :return: + A 2-element tuple of (replacement unicode string, integer index to + resume at) + """ + + bytes_as_ints = bytes_to_list(exc.object[exc.start:exc.end]) + replacements = ['%%%02x' % num for num in bytes_as_ints] + return (''.join(replacements), exc.end) + + +codecs.register_error('iriutf8', _iri_utf8_errors_handler) + + +def _urlquote(string, safe=''): + """ + Quotes a unicode string for use in a URL + + :param string: + A unicode string + + :param safe: + A unicode string of character to not encode + + :return: + None (if string is None) or an ASCII byte string of the quoted string + """ + + if string is None or string == '': + return None + + # Anything already hex quoted is pulled out of the URL and unquoted if + # possible + escapes = [] + if re.search('%[0-9a-fA-F]{2}', string): + # Try to unquote any percent values, restoring them if they are not + # valid UTF-8. Also, requote any safe chars since encoded versions of + # those are functionally different than the unquoted ones. 
+ def _try_unescape(match): + byte_string = unquote_to_bytes(match.group(0)) + unicode_string = byte_string.decode('utf-8', 'iriutf8') + for safe_char in list(safe): + unicode_string = unicode_string.replace(safe_char, '%%%02x' % ord(safe_char)) + return unicode_string + string = re.sub('(?:%[0-9a-fA-F]{2})+', _try_unescape, string) + + # Once we have the minimal set of hex quoted values, removed them from + # the string so that they are not double quoted + def _extract_escape(match): + escapes.append(match.group(0).encode('ascii')) + return '\x00' + string = re.sub('%[0-9a-fA-F]{2}', _extract_escape, string) + + output = urlquote(string.encode('utf-8'), safe=safe.encode('utf-8')) + if not isinstance(output, byte_cls): + output = output.encode('ascii') + + # Restore the existing quoted values that we extracted + if len(escapes) > 0: + def _return_escape(_): + return escapes.pop(0) + output = re.sub(b'%00', _return_escape, output) + + return output + + +def _urlunquote(byte_string, remap=None, preserve=None): + """ + Unquotes a URI portion from a byte string into unicode using UTF-8 + + :param byte_string: + A byte string of the data to unquote + + :param remap: + A list of characters (as unicode) that should be re-mapped to a + %XX encoding. This is used when characters are not valid in part of a + URL. + + :param preserve: + A bool - indicates that the chars to be remapped if they occur in + non-hex form, should be preserved. E.g. / for URL path. + + :return: + A unicode string + """ + + if byte_string is None: + return byte_string + + if byte_string == b'': + return '' + + if preserve: + replacements = ['\x1A', '\x1C', '\x1D', '\x1E', '\x1F'] + preserve_unmap = {} + for char in remap: + replacement = replacements.pop(0) + preserve_unmap[replacement] = char + byte_string = byte_string.replace(char.encode('ascii'), replacement.encode('ascii')) + + byte_string = unquote_to_bytes(byte_string) + + if remap: + for char in remap: + byte_string = byte_string.replace(char.encode('ascii'), ('%%%02x' % ord(char)).encode('ascii')) + + output = byte_string.decode('utf-8', 'iriutf8') + + if preserve: + for replacement, original in preserve_unmap.items(): + output = output.replace(replacement, original) + + return output diff --git a/venv/lib/python2.7/site-packages/asn1crypto/_ordereddict.py b/venv/lib/python2.7/site-packages/asn1crypto/_ordereddict.py new file mode 100644 index 0000000..2f18ab5 --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/_ordereddict.py @@ -0,0 +1,135 @@ +# Copyright (c) 2009 Raymond Hettinger +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation files +# (the "Software"), to deal in the Software without restriction, +# including without limitation the rights to use, copy, modify, merge, +# publish, distribute, sublicense, and/or sell copies of the Software, +# and to permit persons to whom the Software is furnished to do so, +# subject to the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. + +import sys + +if not sys.version_info < (2, 7): + + from collections import OrderedDict + +else: + + from UserDict import DictMixin + + class OrderedDict(dict, DictMixin): + + def __init__(self, *args, **kwds): + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__end + except AttributeError: + self.clear() + self.update(*args, **kwds) + + def clear(self): + self.__end = end = [] + end += [None, end, end] # sentinel node for doubly linked list + self.__map = {} # key --> [key, prev, next] + dict.clear(self) + + def __setitem__(self, key, value): + if key not in self: + end = self.__end + curr = end[1] + curr[2] = end[1] = self.__map[key] = [key, curr, end] + dict.__setitem__(self, key, value) + + def __delitem__(self, key): + dict.__delitem__(self, key) + key, prev, next_ = self.__map.pop(key) + prev[2] = next_ + next_[1] = prev + + def __iter__(self): + end = self.__end + curr = end[2] + while curr is not end: + yield curr[0] + curr = curr[2] + + def __reversed__(self): + end = self.__end + curr = end[1] + while curr is not end: + yield curr[0] + curr = curr[1] + + def popitem(self, last=True): + if not self: + raise KeyError('dictionary is empty') + if last: + key = reversed(self).next() + else: + key = iter(self).next() + value = self.pop(key) + return key, value + + def __reduce__(self): + items = [[k, self[k]] for k in self] + tmp = self.__map, self.__end + del self.__map, self.__end + inst_dict = vars(self).copy() + self.__map, self.__end = tmp + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def keys(self): + return list(self) + + setdefault = DictMixin.setdefault + update = DictMixin.update + pop = DictMixin.pop + values = DictMixin.values + items = DictMixin.items + iterkeys = DictMixin.iterkeys + itervalues = DictMixin.itervalues + iteritems = DictMixin.iteritems + + def __repr__(self): + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + + def copy(self): + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + if isinstance(other, OrderedDict): + if len(self) != len(other): + return False + for p, q in zip(self.items(), other.items()): + if p != q: + return False + return True + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other diff --git a/venv/lib/python2.7/site-packages/asn1crypto/_perf/__init__.py b/venv/lib/python2.7/site-packages/asn1crypto/_perf/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python2.7/site-packages/asn1crypto/_perf/_big_num_ctypes.py b/venv/lib/python2.7/site-packages/asn1crypto/_perf/_big_num_ctypes.py new file mode 100644 index 0000000..8e37e9b --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/_perf/_big_num_ctypes.py @@ -0,0 +1,69 @@ +# coding: utf-8 + +""" +ctypes interface for BN_mod_inverse() function from OpenSSL. 
Exports the +following items: + + - libcrypto + - BN_bn2bin() + - BN_CTX_free() + - BN_CTX_new() + - BN_free() + - BN_mod_inverse() + - BN_new() + - BN_num_bits() + - BN_set_negative() + +Will raise asn1crypto._ffi.LibraryNotFoundError() if libcrypto can not be +found. Will raise asn1crypto._ffi.FFIEngineError() if there is an error +interfacing with libcrypto. +""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +import sys + +from ctypes import CDLL, c_int, c_char_p, c_void_p +from ctypes.util import find_library + +from .._ffi import LibraryNotFoundError, FFIEngineError + + +try: + # On Python 2, the unicode string here may raise a UnicodeDecodeError as it + # tries to join a bytestring path to the unicode name "crypto" + libcrypto_path = find_library(b'crypto' if sys.version_info < (3,) else 'crypto') + if not libcrypto_path: + raise LibraryNotFoundError('The library libcrypto could not be found') + + libcrypto = CDLL(libcrypto_path) + + libcrypto.BN_new.argtypes = [] + libcrypto.BN_new.restype = c_void_p + + libcrypto.BN_bin2bn.argtypes = [c_char_p, c_int, c_void_p] + libcrypto.BN_bin2bn.restype = c_void_p + + libcrypto.BN_bn2bin.argtypes = [c_void_p, c_char_p] + libcrypto.BN_bn2bin.restype = c_int + + libcrypto.BN_set_negative.argtypes = [c_void_p, c_int] + libcrypto.BN_set_negative.restype = None + + libcrypto.BN_num_bits.argtypes = [c_void_p] + libcrypto.BN_num_bits.restype = c_int + + libcrypto.BN_free.argtypes = [c_void_p] + libcrypto.BN_free.restype = None + + libcrypto.BN_CTX_new.argtypes = [] + libcrypto.BN_CTX_new.restype = c_void_p + + libcrypto.BN_CTX_free.argtypes = [c_void_p] + libcrypto.BN_CTX_free.restype = None + + libcrypto.BN_mod_inverse.argtypes = [c_void_p, c_void_p, c_void_p, c_void_p] + libcrypto.BN_mod_inverse.restype = c_void_p + +except (AttributeError): + raise FFIEngineError('Error initializing ctypes') diff --git a/venv/lib/python2.7/site-packages/asn1crypto/_teletex_codec.py b/venv/lib/python2.7/site-packages/asn1crypto/_teletex_codec.py new file mode 100644 index 0000000..b5991aa --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/_teletex_codec.py @@ -0,0 +1,331 @@ +# coding: utf-8 + +""" +Implementation of the teletex T.61 codec. 
Exports the following items: + + - register() +""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +import codecs + + +class TeletexCodec(codecs.Codec): + + def encode(self, input_, errors='strict'): + return codecs.charmap_encode(input_, errors, ENCODING_TABLE) + + def decode(self, input_, errors='strict'): + return codecs.charmap_decode(input_, errors, DECODING_TABLE) + + +class TeletexIncrementalEncoder(codecs.IncrementalEncoder): + + def encode(self, input_, final=False): + return codecs.charmap_encode(input_, self.errors, ENCODING_TABLE)[0] + + +class TeletexIncrementalDecoder(codecs.IncrementalDecoder): + + def decode(self, input_, final=False): + return codecs.charmap_decode(input_, self.errors, DECODING_TABLE)[0] + + +class TeletexStreamWriter(TeletexCodec, codecs.StreamWriter): + + pass + + +class TeletexStreamReader(TeletexCodec, codecs.StreamReader): + + pass + + +def teletex_search_function(name): + """ + Search function for teletex codec that is passed to codecs.register() + """ + + if name != 'teletex': + return None + + return codecs.CodecInfo( + name='teletex', + encode=TeletexCodec().encode, + decode=TeletexCodec().decode, + incrementalencoder=TeletexIncrementalEncoder, + incrementaldecoder=TeletexIncrementalDecoder, + streamreader=TeletexStreamReader, + streamwriter=TeletexStreamWriter, + ) + + +def register(): + """ + Registers the teletex codec + """ + + codecs.register(teletex_search_function) + + +# http://en.wikipedia.org/wiki/ITU_T.61 +DECODING_TABLE = ( + '\u0000' + '\u0001' + '\u0002' + '\u0003' + '\u0004' + '\u0005' + '\u0006' + '\u0007' + '\u0008' + '\u0009' + '\u000A' + '\u000B' + '\u000C' + '\u000D' + '\u000E' + '\u000F' + '\u0010' + '\u0011' + '\u0012' + '\u0013' + '\u0014' + '\u0015' + '\u0016' + '\u0017' + '\u0018' + '\u0019' + '\u001A' + '\u001B' + '\u001C' + '\u001D' + '\u001E' + '\u001F' + '\u0020' + '\u0021' + '\u0022' + '\ufffe' + '\ufffe' + '\u0025' + '\u0026' + '\u0027' + '\u0028' + '\u0029' + '\u002A' + '\u002B' + '\u002C' + '\u002D' + '\u002E' + '\u002F' + '\u0030' + '\u0031' + '\u0032' + '\u0033' + '\u0034' + '\u0035' + '\u0036' + '\u0037' + '\u0038' + '\u0039' + '\u003A' + '\u003B' + '\u003C' + '\u003D' + '\u003E' + '\u003F' + '\u0040' + '\u0041' + '\u0042' + '\u0043' + '\u0044' + '\u0045' + '\u0046' + '\u0047' + '\u0048' + '\u0049' + '\u004A' + '\u004B' + '\u004C' + '\u004D' + '\u004E' + '\u004F' + '\u0050' + '\u0051' + '\u0052' + '\u0053' + '\u0054' + '\u0055' + '\u0056' + '\u0057' + '\u0058' + '\u0059' + '\u005A' + '\u005B' + '\ufffe' + '\u005D' + '\ufffe' + '\u005F' + '\ufffe' + '\u0061' + '\u0062' + '\u0063' + '\u0064' + '\u0065' + '\u0066' + '\u0067' + '\u0068' + '\u0069' + '\u006A' + '\u006B' + '\u006C' + '\u006D' + '\u006E' + '\u006F' + '\u0070' + '\u0071' + '\u0072' + '\u0073' + '\u0074' + '\u0075' + '\u0076' + '\u0077' + '\u0078' + '\u0079' + '\u007A' + '\ufffe' + '\u007C' + '\ufffe' + '\ufffe' + '\u007F' + '\u0080' + '\u0081' + '\u0082' + '\u0083' + '\u0084' + '\u0085' + '\u0086' + '\u0087' + '\u0088' + '\u0089' + '\u008A' + '\u008B' + '\u008C' + '\u008D' + '\u008E' + '\u008F' + '\u0090' + '\u0091' + '\u0092' + '\u0093' + '\u0094' + '\u0095' + '\u0096' + '\u0097' + '\u0098' + '\u0099' + '\u009A' + '\u009B' + '\u009C' + '\u009D' + '\u009E' + '\u009F' + '\u00A0' + '\u00A1' + '\u00A2' + '\u00A3' + '\u0024' + '\u00A5' + '\u0023' + '\u00A7' + '\u00A4' + '\ufffe' + '\ufffe' + '\u00AB' + '\ufffe' + '\ufffe' + '\ufffe' + '\ufffe' + '\u00B0' + '\u00B1' + '\u00B2' + '\u00B3' + '\u00D7' + '\u00B5' + 
'\u00B6' + '\u00B7' + '\u00F7' + '\ufffe' + '\ufffe' + '\u00BB' + '\u00BC' + '\u00BD' + '\u00BE' + '\u00BF' + '\ufffe' + '\u0300' + '\u0301' + '\u0302' + '\u0303' + '\u0304' + '\u0306' + '\u0307' + '\u0308' + '\ufffe' + '\u030A' + '\u0327' + '\u0332' + '\u030B' + '\u0328' + '\u030C' + '\ufffe' + '\ufffe' + '\ufffe' + '\ufffe' + '\ufffe' + '\ufffe' + '\ufffe' + '\ufffe' + '\ufffe' + '\ufffe' + '\ufffe' + '\ufffe' + '\ufffe' + '\ufffe' + '\ufffe' + '\ufffe' + '\u2126' + '\u00C6' + '\u00D0' + '\u00AA' + '\u0126' + '\ufffe' + '\u0132' + '\u013F' + '\u0141' + '\u00D8' + '\u0152' + '\u00BA' + '\u00DE' + '\u0166' + '\u014A' + '\u0149' + '\u0138' + '\u00E6' + '\u0111' + '\u00F0' + '\u0127' + '\u0131' + '\u0133' + '\u0140' + '\u0142' + '\u00F8' + '\u0153' + '\u00DF' + '\u00FE' + '\u0167' + '\u014B' + '\ufffe' +) +ENCODING_TABLE = codecs.charmap_build(DECODING_TABLE) diff --git a/venv/lib/python2.7/site-packages/asn1crypto/_types.py b/venv/lib/python2.7/site-packages/asn1crypto/_types.py new file mode 100644 index 0000000..b9ca8cc --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/_types.py @@ -0,0 +1,46 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +import inspect +import sys + + +if sys.version_info < (3,): + str_cls = unicode # noqa + byte_cls = str + int_types = (int, long) # noqa + + def bytes_to_list(byte_string): + return [ord(b) for b in byte_string] + + chr_cls = chr + +else: + str_cls = str + byte_cls = bytes + int_types = int + + bytes_to_list = list + + def chr_cls(num): + return bytes([num]) + + +def type_name(value): + """ + Returns a user-readable name for the type of an object + + :param value: + A value to get the type name of + + :return: + A unicode string of the object's type name + """ + + if inspect.isclass(value): + cls = value + else: + cls = value.__class__ + if cls.__module__ in set(['builtins', '__builtin__']): + return cls.__name__ + return '%s.%s' % (cls.__module__, cls.__name__) diff --git a/venv/lib/python2.7/site-packages/asn1crypto/algos.py b/venv/lib/python2.7/site-packages/asn1crypto/algos.py new file mode 100644 index 0000000..c805433 --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/algos.py @@ -0,0 +1,1143 @@ +# coding: utf-8 + +""" +ASN.1 type classes for various algorithms using in various aspects of public +key cryptography. Exports the following items: + + - AlgorithmIdentifier() + - AnyAlgorithmIdentifier() + - DigestAlgorithm() + - DigestInfo() + - DSASignature() + - EncryptionAlgorithm() + - HmacAlgorithm() + - KdfAlgorithm() + - Pkcs5MacAlgorithm() + - SignedDigestAlgorithm() + +Other type classes are defined that help compose the types listed above. 
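+
+A minimal round trip (editor's sketch, using only classes defined in this
+module):
+
+    algo = DigestAlgorithm({'algorithm': 'sha256'})
+    der = algo.dump()
+    assert DigestAlgorithm.load(der)['algorithm'].native == 'sha256'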
+""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +from ._errors import unwrap +from ._int import fill_width +from .util import int_from_bytes, int_to_bytes +from .core import ( + Any, + Choice, + Integer, + Null, + ObjectIdentifier, + OctetString, + Sequence, + Void, +) + + +# Structures and OIDs in this file are pulled from +# https://tools.ietf.org/html/rfc3279, https://tools.ietf.org/html/rfc4055, +# https://tools.ietf.org/html/rfc5758, https://tools.ietf.org/html/rfc7292, +# http://www.emc.com/collateral/white-papers/h11302-pkcs5v2-1-password-based-cryptography-standard-wp.pdf + +class AlgorithmIdentifier(Sequence): + _fields = [ + ('algorithm', ObjectIdentifier), + ('parameters', Any, {'optional': True}), + ] + + +class _ForceNullParameters(object): + """ + Various structures based on AlgorithmIdentifier require that the parameters + field be core.Null() for certain OIDs. This mixin ensures that happens. + """ + + # The following attribute, plus the parameters spec callback and custom + # __setitem__ are all to handle a situation where parameters should not be + # optional and must be Null for certain OIDs. More info at + # https://tools.ietf.org/html/rfc4055#page-15 and + # https://tools.ietf.org/html/rfc4055#section-2.1 + _null_algos = set([ + '1.2.840.113549.1.1.1', # rsassa_pkcs1v15 / rsaes_pkcs1v15 / rsa + '1.2.840.113549.1.1.11', # sha256_rsa + '1.2.840.113549.1.1.12', # sha384_rsa + '1.2.840.113549.1.1.13', # sha512_rsa + '1.2.840.113549.1.1.14', # sha224_rsa + '1.3.14.3.2.26', # sha1 + '2.16.840.1.101.3.4.2.4', # sha224 + '2.16.840.1.101.3.4.2.1', # sha256 + '2.16.840.1.101.3.4.2.2', # sha384 + '2.16.840.1.101.3.4.2.3', # sha512 + ]) + + def _parameters_spec(self): + if self._oid_pair == ('algorithm', 'parameters'): + algo = self['algorithm'].native + if algo in self._oid_specs: + return self._oid_specs[algo] + + if self['algorithm'].dotted in self._null_algos: + return Null + + return None + + _spec_callbacks = { + 'parameters': _parameters_spec + } + + # We have to override this since the spec callback uses the value of + # algorithm to determine the parameter spec, however default values are + # assigned before setting a field, so a default value can't be based on + # another field value (unless it is a default also). Thus we have to + # manually check to see if the algorithm was set and parameters is unset, + # and then fix the value as appropriate. 
+ def __setitem__(self, key, value): + res = super(_ForceNullParameters, self).__setitem__(key, value) + if key != 'algorithm': + return res + if self['algorithm'].dotted not in self._null_algos: + return res + if self['parameters'].__class__ != Void: + return res + self['parameters'] = Null() + return res + + +class HmacAlgorithmId(ObjectIdentifier): + _map = { + '1.3.14.3.2.10': 'des_mac', + '1.2.840.113549.2.7': 'sha1', + '1.2.840.113549.2.8': 'sha224', + '1.2.840.113549.2.9': 'sha256', + '1.2.840.113549.2.10': 'sha384', + '1.2.840.113549.2.11': 'sha512', + '1.2.840.113549.2.12': 'sha512_224', + '1.2.840.113549.2.13': 'sha512_256', + } + + +class HmacAlgorithm(Sequence): + _fields = [ + ('algorithm', HmacAlgorithmId), + ('parameters', Any, {'optional': True}), + ] + + +class DigestAlgorithmId(ObjectIdentifier): + _map = { + '1.2.840.113549.2.2': 'md2', + '1.2.840.113549.2.5': 'md5', + '1.3.14.3.2.26': 'sha1', + '2.16.840.1.101.3.4.2.4': 'sha224', + '2.16.840.1.101.3.4.2.1': 'sha256', + '2.16.840.1.101.3.4.2.2': 'sha384', + '2.16.840.1.101.3.4.2.3': 'sha512', + '2.16.840.1.101.3.4.2.5': 'sha512_224', + '2.16.840.1.101.3.4.2.6': 'sha512_256', + } + + +class DigestAlgorithm(_ForceNullParameters, Sequence): + _fields = [ + ('algorithm', DigestAlgorithmId), + ('parameters', Any, {'optional': True}), + ] + + +# This structure is what is signed with a SignedDigestAlgorithm +class DigestInfo(Sequence): + _fields = [ + ('digest_algorithm', DigestAlgorithm), + ('digest', OctetString), + ] + + +class MaskGenAlgorithmId(ObjectIdentifier): + _map = { + '1.2.840.113549.1.1.8': 'mgf1', + } + + +class MaskGenAlgorithm(Sequence): + _fields = [ + ('algorithm', MaskGenAlgorithmId), + ('parameters', Any, {'optional': True}), + ] + + _oid_pair = ('algorithm', 'parameters') + _oid_specs = { + 'mgf1': DigestAlgorithm + } + + +class TrailerField(Integer): + _map = { + 1: 'trailer_field_bc', + } + + +class RSASSAPSSParams(Sequence): + _fields = [ + ( + 'hash_algorithm', + DigestAlgorithm, + { + 'explicit': 0, + 'default': {'algorithm': 'sha1'}, + } + ), + ( + 'mask_gen_algorithm', + MaskGenAlgorithm, + { + 'explicit': 1, + 'default': { + 'algorithm': 'mgf1', + 'parameters': {'algorithm': 'sha1'}, + }, + } + ), + ( + 'salt_length', + Integer, + { + 'explicit': 2, + 'default': 20, + } + ), + ( + 'trailer_field', + TrailerField, + { + 'explicit': 3, + 'default': 'trailer_field_bc', + } + ), + ] + + +class SignedDigestAlgorithmId(ObjectIdentifier): + _map = { + '1.3.14.3.2.3': 'md5_rsa', + '1.3.14.3.2.29': 'sha1_rsa', + '1.3.14.7.2.3.1': 'md2_rsa', + '1.2.840.113549.1.1.2': 'md2_rsa', + '1.2.840.113549.1.1.4': 'md5_rsa', + '1.2.840.113549.1.1.5': 'sha1_rsa', + '1.2.840.113549.1.1.14': 'sha224_rsa', + '1.2.840.113549.1.1.11': 'sha256_rsa', + '1.2.840.113549.1.1.12': 'sha384_rsa', + '1.2.840.113549.1.1.13': 'sha512_rsa', + '1.2.840.113549.1.1.10': 'rsassa_pss', + '1.2.840.10040.4.3': 'sha1_dsa', + '1.3.14.3.2.13': 'sha1_dsa', + '1.3.14.3.2.27': 'sha1_dsa', + '2.16.840.1.101.3.4.3.1': 'sha224_dsa', + '2.16.840.1.101.3.4.3.2': 'sha256_dsa', + '1.2.840.10045.4.1': 'sha1_ecdsa', + '1.2.840.10045.4.3.1': 'sha224_ecdsa', + '1.2.840.10045.4.3.2': 'sha256_ecdsa', + '1.2.840.10045.4.3.3': 'sha384_ecdsa', + '1.2.840.10045.4.3.4': 'sha512_ecdsa', + # For when the digest is specified elsewhere in a Sequence + '1.2.840.113549.1.1.1': 'rsassa_pkcs1v15', + '1.2.840.10040.4.1': 'dsa', + '1.2.840.10045.4': 'ecdsa', + } + + _reverse_map = { + 'dsa': '1.2.840.10040.4.1', + 'ecdsa': '1.2.840.10045.4', + 'md2_rsa': 
'1.2.840.113549.1.1.2', + 'md5_rsa': '1.2.840.113549.1.1.4', + 'rsassa_pkcs1v15': '1.2.840.113549.1.1.1', + 'rsassa_pss': '1.2.840.113549.1.1.10', + 'sha1_dsa': '1.2.840.10040.4.3', + 'sha1_ecdsa': '1.2.840.10045.4.1', + 'sha1_rsa': '1.2.840.113549.1.1.5', + 'sha224_dsa': '2.16.840.1.101.3.4.3.1', + 'sha224_ecdsa': '1.2.840.10045.4.3.1', + 'sha224_rsa': '1.2.840.113549.1.1.14', + 'sha256_dsa': '2.16.840.1.101.3.4.3.2', + 'sha256_ecdsa': '1.2.840.10045.4.3.2', + 'sha256_rsa': '1.2.840.113549.1.1.11', + 'sha384_ecdsa': '1.2.840.10045.4.3.3', + 'sha384_rsa': '1.2.840.113549.1.1.12', + 'sha512_ecdsa': '1.2.840.10045.4.3.4', + 'sha512_rsa': '1.2.840.113549.1.1.13', + } + + +class SignedDigestAlgorithm(_ForceNullParameters, Sequence): + _fields = [ + ('algorithm', SignedDigestAlgorithmId), + ('parameters', Any, {'optional': True}), + ] + + _oid_pair = ('algorithm', 'parameters') + _oid_specs = { + 'rsassa_pss': RSASSAPSSParams, + } + + @property + def signature_algo(self): + """ + :return: + A unicode string of "rsassa_pkcs1v15", "rsassa_pss", "dsa" or + "ecdsa" + """ + + algorithm = self['algorithm'].native + + algo_map = { + 'md2_rsa': 'rsassa_pkcs1v15', + 'md5_rsa': 'rsassa_pkcs1v15', + 'sha1_rsa': 'rsassa_pkcs1v15', + 'sha224_rsa': 'rsassa_pkcs1v15', + 'sha256_rsa': 'rsassa_pkcs1v15', + 'sha384_rsa': 'rsassa_pkcs1v15', + 'sha512_rsa': 'rsassa_pkcs1v15', + 'rsassa_pkcs1v15': 'rsassa_pkcs1v15', + 'rsassa_pss': 'rsassa_pss', + 'sha1_dsa': 'dsa', + 'sha224_dsa': 'dsa', + 'sha256_dsa': 'dsa', + 'dsa': 'dsa', + 'sha1_ecdsa': 'ecdsa', + 'sha224_ecdsa': 'ecdsa', + 'sha256_ecdsa': 'ecdsa', + 'sha384_ecdsa': 'ecdsa', + 'sha512_ecdsa': 'ecdsa', + 'ecdsa': 'ecdsa', + } + if algorithm in algo_map: + return algo_map[algorithm] + + raise ValueError(unwrap( + ''' + Signature algorithm not known for %s + ''', + algorithm + )) + + @property + def hash_algo(self): + """ + :return: + A unicode string of "md2", "md5", "sha1", "sha224", "sha256", + "sha384", "sha512", "sha512_224", "sha512_256" + """ + + algorithm = self['algorithm'].native + + algo_map = { + 'md2_rsa': 'md2', + 'md5_rsa': 'md5', + 'sha1_rsa': 'sha1', + 'sha224_rsa': 'sha224', + 'sha256_rsa': 'sha256', + 'sha384_rsa': 'sha384', + 'sha512_rsa': 'sha512', + 'sha1_dsa': 'sha1', + 'sha224_dsa': 'sha224', + 'sha256_dsa': 'sha256', + 'sha1_ecdsa': 'sha1', + 'sha224_ecdsa': 'sha224', + 'sha256_ecdsa': 'sha256', + 'sha384_ecdsa': 'sha384', + 'sha512_ecdsa': 'sha512', + } + if algorithm in algo_map: + return algo_map[algorithm] + + if algorithm == 'rsassa_pss': + return self['parameters']['hash_algorithm']['algorithm'].native + + raise ValueError(unwrap( + ''' + Hash algorithm not known for %s + ''', + algorithm + )) + + +class Pbkdf2Salt(Choice): + _alternatives = [ + ('specified', OctetString), + ('other_source', AlgorithmIdentifier), + ] + + +class Pbkdf2Params(Sequence): + _fields = [ + ('salt', Pbkdf2Salt), + ('iteration_count', Integer), + ('key_length', Integer, {'optional': True}), + ('prf', HmacAlgorithm, {'default': {'algorithm': 'sha1'}}), + ] + + +class KdfAlgorithmId(ObjectIdentifier): + _map = { + '1.2.840.113549.1.5.12': 'pbkdf2' + } + + +class KdfAlgorithm(Sequence): + _fields = [ + ('algorithm', KdfAlgorithmId), + ('parameters', Any, {'optional': True}), + ] + _oid_pair = ('algorithm', 'parameters') + _oid_specs = { + 'pbkdf2': Pbkdf2Params + } + + +class DHParameters(Sequence): + """ + Original Name: DHParameter + Source: ftp://ftp.rsasecurity.com/pub/pkcs/ascii/pkcs-3.asc section 9 + """ + + _fields = [ + ('p', Integer), + ('g', 
Integer), + ('private_value_length', Integer, {'optional': True}), + ] + + +class KeyExchangeAlgorithmId(ObjectIdentifier): + _map = { + '1.2.840.113549.1.3.1': 'dh', + } + + +class KeyExchangeAlgorithm(Sequence): + _fields = [ + ('algorithm', KeyExchangeAlgorithmId), + ('parameters', Any, {'optional': True}), + ] + _oid_pair = ('algorithm', 'parameters') + _oid_specs = { + 'dh': DHParameters, + } + + +class Rc2Params(Sequence): + _fields = [ + ('rc2_parameter_version', Integer, {'optional': True}), + ('iv', OctetString), + ] + + +class Rc5ParamVersion(Integer): + _map = { + 16: 'v1-0' + } + + +class Rc5Params(Sequence): + _fields = [ + ('version', Rc5ParamVersion), + ('rounds', Integer), + ('block_size_in_bits', Integer), + ('iv', OctetString, {'optional': True}), + ] + + +class Pbes1Params(Sequence): + _fields = [ + ('salt', OctetString), + ('iterations', Integer), + ] + + +class PSourceAlgorithmId(ObjectIdentifier): + _map = { + '1.2.840.113549.1.1.9': 'p_specified', + } + + +class PSourceAlgorithm(Sequence): + _fields = [ + ('algorithm', PSourceAlgorithmId), + ('parameters', Any, {'optional': True}), + ] + + _oid_pair = ('algorithm', 'parameters') + _oid_specs = { + 'p_specified': OctetString + } + + +class RSAESOAEPParams(Sequence): + _fields = [ + ( + 'hash_algorithm', + DigestAlgorithm, + { + 'explicit': 0, + 'default': {'algorithm': 'sha1'} + } + ), + ( + 'mask_gen_algorithm', + MaskGenAlgorithm, + { + 'explicit': 1, + 'default': { + 'algorithm': 'mgf1', + 'parameters': {'algorithm': 'sha1'} + } + } + ), + ( + 'p_source_algorithm', + PSourceAlgorithm, + { + 'explicit': 2, + 'default': { + 'algorithm': 'p_specified', + 'parameters': b'' + } + } + ), + ] + + +class DSASignature(Sequence): + """ + An ASN.1 class for translating between the OS crypto library's + representation of an (EC)DSA signature and the ASN.1 structure that is part + of various RFCs. + + Original Name: DSS-Sig-Value + Source: https://tools.ietf.org/html/rfc3279#section-2.2.2 + """ + + _fields = [ + ('r', Integer), + ('s', Integer), + ] + + @classmethod + def from_p1363(cls, data): + """ + Reads a signature from a byte string encoding accordint to IEEE P1363, + which is used by Microsoft's BCryptSignHash() function. + + :param data: + A byte string from BCryptSignHash() + + :return: + A DSASignature object + """ + + r = int_from_bytes(data[0:len(data) // 2]) + s = int_from_bytes(data[len(data) // 2:]) + return cls({'r': r, 's': s}) + + def to_p1363(self): + """ + Dumps a signature to a byte string compatible with Microsoft's + BCryptVerifySignature() function. 
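+        (Editor's illustrative round trip between the two helpers:)
+
+            sig = DSASignature({'r': 65537, 's': 255})
+            assert DSASignature.from_p1363(sig.to_p1363()).native == sig.native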
+ + :return: + A byte string compatible with BCryptVerifySignature() + """ + + r_bytes = int_to_bytes(self['r'].native) + s_bytes = int_to_bytes(self['s'].native) + + int_byte_length = max(len(r_bytes), len(s_bytes)) + r_bytes = fill_width(r_bytes, int_byte_length) + s_bytes = fill_width(s_bytes, int_byte_length) + + return r_bytes + s_bytes + + +class EncryptionAlgorithmId(ObjectIdentifier): + _map = { + '1.3.14.3.2.7': 'des', + '1.2.840.113549.3.7': 'tripledes_3key', + '1.2.840.113549.3.2': 'rc2', + '1.2.840.113549.3.9': 'rc5', + # From http://csrc.nist.gov/groups/ST/crypto_apps_infra/csor/algorithms.html#AES + '2.16.840.1.101.3.4.1.1': 'aes128_ecb', + '2.16.840.1.101.3.4.1.2': 'aes128_cbc', + '2.16.840.1.101.3.4.1.3': 'aes128_ofb', + '2.16.840.1.101.3.4.1.4': 'aes128_cfb', + '2.16.840.1.101.3.4.1.5': 'aes128_wrap', + '2.16.840.1.101.3.4.1.6': 'aes128_gcm', + '2.16.840.1.101.3.4.1.7': 'aes128_ccm', + '2.16.840.1.101.3.4.1.8': 'aes128_wrap_pad', + '2.16.840.1.101.3.4.1.21': 'aes192_ecb', + '2.16.840.1.101.3.4.1.22': 'aes192_cbc', + '2.16.840.1.101.3.4.1.23': 'aes192_ofb', + '2.16.840.1.101.3.4.1.24': 'aes192_cfb', + '2.16.840.1.101.3.4.1.25': 'aes192_wrap', + '2.16.840.1.101.3.4.1.26': 'aes192_gcm', + '2.16.840.1.101.3.4.1.27': 'aes192_ccm', + '2.16.840.1.101.3.4.1.28': 'aes192_wrap_pad', + '2.16.840.1.101.3.4.1.41': 'aes256_ecb', + '2.16.840.1.101.3.4.1.42': 'aes256_cbc', + '2.16.840.1.101.3.4.1.43': 'aes256_ofb', + '2.16.840.1.101.3.4.1.44': 'aes256_cfb', + '2.16.840.1.101.3.4.1.45': 'aes256_wrap', + '2.16.840.1.101.3.4.1.46': 'aes256_gcm', + '2.16.840.1.101.3.4.1.47': 'aes256_ccm', + '2.16.840.1.101.3.4.1.48': 'aes256_wrap_pad', + # From PKCS#5 + '1.2.840.113549.1.5.13': 'pbes2', + '1.2.840.113549.1.5.1': 'pbes1_md2_des', + '1.2.840.113549.1.5.3': 'pbes1_md5_des', + '1.2.840.113549.1.5.4': 'pbes1_md2_rc2', + '1.2.840.113549.1.5.6': 'pbes1_md5_rc2', + '1.2.840.113549.1.5.10': 'pbes1_sha1_des', + '1.2.840.113549.1.5.11': 'pbes1_sha1_rc2', + # From PKCS#12 + '1.2.840.113549.1.12.1.1': 'pkcs12_sha1_rc4_128', + '1.2.840.113549.1.12.1.2': 'pkcs12_sha1_rc4_40', + '1.2.840.113549.1.12.1.3': 'pkcs12_sha1_tripledes_3key', + '1.2.840.113549.1.12.1.4': 'pkcs12_sha1_tripledes_2key', + '1.2.840.113549.1.12.1.5': 'pkcs12_sha1_rc2_128', + '1.2.840.113549.1.12.1.6': 'pkcs12_sha1_rc2_40', + # PKCS#1 v2.2 + '1.2.840.113549.1.1.1': 'rsaes_pkcs1v15', + '1.2.840.113549.1.1.7': 'rsaes_oaep', + } + + +class EncryptionAlgorithm(_ForceNullParameters, Sequence): + _fields = [ + ('algorithm', EncryptionAlgorithmId), + ('parameters', Any, {'optional': True}), + ] + + _oid_pair = ('algorithm', 'parameters') + _oid_specs = { + 'des': OctetString, + 'tripledes_3key': OctetString, + 'rc2': Rc2Params, + 'rc5': Rc5Params, + 'aes128_cbc': OctetString, + 'aes192_cbc': OctetString, + 'aes256_cbc': OctetString, + 'aes128_ofb': OctetString, + 'aes192_ofb': OctetString, + 'aes256_ofb': OctetString, + # From PKCS#5 + 'pbes1_md2_des': Pbes1Params, + 'pbes1_md5_des': Pbes1Params, + 'pbes1_md2_rc2': Pbes1Params, + 'pbes1_md5_rc2': Pbes1Params, + 'pbes1_sha1_des': Pbes1Params, + 'pbes1_sha1_rc2': Pbes1Params, + # From PKCS#12 + 'pkcs12_sha1_rc4_128': Pbes1Params, + 'pkcs12_sha1_rc4_40': Pbes1Params, + 'pkcs12_sha1_tripledes_3key': Pbes1Params, + 'pkcs12_sha1_tripledes_2key': Pbes1Params, + 'pkcs12_sha1_rc2_128': Pbes1Params, + 'pkcs12_sha1_rc2_40': Pbes1Params, + # PKCS#1 v2.2 + 'rsaes_oaep': RSAESOAEPParams, + } + + @property + def kdf(self): + """ + Returns the name of the key derivation function to use. 
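+        (Editor's sketch: a PBES1 scheme resolves to PBKDF1.)
+
+            algo = EncryptionAlgorithm({
+                'algorithm': 'pbes1_sha1_des',
+                'parameters': {'salt': b'\x00' * 8, 'iterations': 2048},
+            })
+            assert algo.kdf == 'pbkdf1'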
+ + :return: + A unicode from of one of the following: "pbkdf1", "pbkdf2", + "pkcs12_kdf" + """ + + encryption_algo = self['algorithm'].native + + if encryption_algo == 'pbes2': + return self['parameters']['key_derivation_func']['algorithm'].native + + if encryption_algo.find('.') == -1: + if encryption_algo.find('_') != -1: + encryption_algo, _ = encryption_algo.split('_', 1) + + if encryption_algo == 'pbes1': + return 'pbkdf1' + + if encryption_algo == 'pkcs12': + return 'pkcs12_kdf' + + raise ValueError(unwrap( + ''' + Encryption algorithm "%s" does not have a registered key + derivation function + ''', + encryption_algo + )) + + raise ValueError(unwrap( + ''' + Unrecognized encryption algorithm "%s", can not determine key + derivation function + ''', + encryption_algo + )) + + @property + def kdf_hmac(self): + """ + Returns the HMAC algorithm to use with the KDF. + + :return: + A unicode string of one of the following: "md2", "md5", "sha1", + "sha224", "sha256", "sha384", "sha512" + """ + + encryption_algo = self['algorithm'].native + + if encryption_algo == 'pbes2': + return self['parameters']['key_derivation_func']['parameters']['prf']['algorithm'].native + + if encryption_algo.find('.') == -1: + if encryption_algo.find('_') != -1: + _, hmac_algo, _ = encryption_algo.split('_', 2) + return hmac_algo + + raise ValueError(unwrap( + ''' + Encryption algorithm "%s" does not have a registered key + derivation function + ''', + encryption_algo + )) + + raise ValueError(unwrap( + ''' + Unrecognized encryption algorithm "%s", can not determine key + derivation hmac algorithm + ''', + encryption_algo + )) + + @property + def kdf_salt(self): + """ + Returns the byte string to use as the salt for the KDF. + + :return: + A byte string + """ + + encryption_algo = self['algorithm'].native + + if encryption_algo == 'pbes2': + salt = self['parameters']['key_derivation_func']['parameters']['salt'] + + if salt.name == 'other_source': + raise ValueError(unwrap( + ''' + Can not determine key derivation salt - the + reserved-for-future-use other source salt choice was + specified in the PBKDF2 params structure + ''' + )) + + return salt.native + + if encryption_algo.find('.') == -1: + if encryption_algo.find('_') != -1: + return self['parameters']['salt'].native + + raise ValueError(unwrap( + ''' + Encryption algorithm "%s" does not have a registered key + derivation function + ''', + encryption_algo + )) + + raise ValueError(unwrap( + ''' + Unrecognized encryption algorithm "%s", can not determine key + derivation salt + ''', + encryption_algo + )) + + @property + def kdf_iterations(self): + """ + Returns the number of iterations that should be run via the KDF. + + :return: + An integer + """ + + encryption_algo = self['algorithm'].native + + if encryption_algo == 'pbes2': + return self['parameters']['key_derivation_func']['parameters']['iteration_count'].native + + if encryption_algo.find('.') == -1: + if encryption_algo.find('_') != -1: + return self['parameters']['iterations'].native + + raise ValueError(unwrap( + ''' + Encryption algorithm "%s" does not have a registered key + derivation function + ''', + encryption_algo + )) + + raise ValueError(unwrap( + ''' + Unrecognized encryption algorithm "%s", can not determine key + derivation iterations + ''', + encryption_algo + )) + + @property + def key_length(self): + """ + Returns the key length to pass to the cipher/kdf. 
The PKCS#5 spec does + not specify a way to store the RC5 key length, however this tends not + to be a problem since OpenSSL does not support RC5 in PKCS#8 and OS X + does not provide an RC5 cipher for use in the Security Transforms + library. + + :raises: + ValueError - when the key length can not be determined + + :return: + An integer representing the length in bytes + """ + + encryption_algo = self['algorithm'].native + + if encryption_algo[0:3] == 'aes': + return { + 'aes128_': 16, + 'aes192_': 24, + 'aes256_': 32, + }[encryption_algo[0:7]] + + cipher_lengths = { + 'des': 8, + 'tripledes_3key': 24, + } + + if encryption_algo in cipher_lengths: + return cipher_lengths[encryption_algo] + + if encryption_algo == 'rc2': + rc2_params = self['parameters'].parsed['encryption_scheme']['parameters'].parsed + rc2_parameter_version = rc2_params['rc2_parameter_version'].native + + # See page 24 of + # http://www.emc.com/collateral/white-papers/h11302-pkcs5v2-1-password-based-cryptography-standard-wp.pdf + encoded_key_bits_map = { + 160: 5, # 40-bit + 120: 8, # 64-bit + 58: 16, # 128-bit + } + + if rc2_parameter_version in encoded_key_bits_map: + return encoded_key_bits_map[rc2_parameter_version] + + if rc2_parameter_version >= 256: + return rc2_parameter_version + + if rc2_parameter_version is None: + return 4 # 32-bit default + + raise ValueError(unwrap( + ''' + Invalid RC2 parameter version found in EncryptionAlgorithm + parameters + ''' + )) + + if encryption_algo == 'pbes2': + key_length = self['parameters']['key_derivation_func']['parameters']['key_length'].native + if key_length is not None: + return key_length + + # If the KDF params don't specify the key size, we can infer it from + # the encryption scheme for all schemes except for RC5. However, in + # practical terms, neither OpenSSL or OS X support RC5 for PKCS#8 + # so it is unlikely to be an issue that is run into. + + return self['parameters']['encryption_scheme'].key_length + + if encryption_algo.find('.') == -1: + return { + 'pbes1_md2_des': 8, + 'pbes1_md5_des': 8, + 'pbes1_md2_rc2': 8, + 'pbes1_md5_rc2': 8, + 'pbes1_sha1_des': 8, + 'pbes1_sha1_rc2': 8, + 'pkcs12_sha1_rc4_128': 16, + 'pkcs12_sha1_rc4_40': 5, + 'pkcs12_sha1_tripledes_3key': 24, + 'pkcs12_sha1_tripledes_2key': 16, + 'pkcs12_sha1_rc2_128': 16, + 'pkcs12_sha1_rc2_40': 5, + }[encryption_algo] + + raise ValueError(unwrap( + ''' + Unrecognized encryption algorithm "%s" + ''', + encryption_algo + )) + + @property + def encryption_mode(self): + """ + Returns the name of the encryption mode to use. + + :return: + A unicode string from one of the following: "cbc", "ecb", "ofb", + "cfb", "wrap", "gcm", "ccm", "wrap_pad" + """ + + encryption_algo = self['algorithm'].native + + if encryption_algo[0:7] in set(['aes128_', 'aes192_', 'aes256_']): + return encryption_algo[7:] + + if encryption_algo[0:6] == 'pbes1_': + return 'cbc' + + if encryption_algo[0:7] == 'pkcs12_': + return 'cbc' + + if encryption_algo in set(['des', 'tripledes_3key', 'rc2', 'rc5']): + return 'cbc' + + if encryption_algo == 'pbes2': + return self['parameters']['encryption_scheme'].encryption_mode + + raise ValueError(unwrap( + ''' + Unrecognized encryption algorithm "%s" + ''', + encryption_algo + )) + + @property + def encryption_cipher(self): + """ + Returns the name of the symmetric encryption cipher to use. The key + length can be retrieved via the .key_length property to disabiguate + between different variations of TripleDES, AES, and the RC* ciphers. 
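+        (Editor's sketch: the cipher family plus .key_length identify the
+        exact variant.)
+
+            algo = EncryptionAlgorithm({'algorithm': 'aes256_cbc', 'parameters': b'\x00' * 16})
+            assert (algo.encryption_cipher, algo.key_length) == ('aes', 32)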
+ + :return: + A unicode string from one of the following: "rc2", "rc5", "des", + "tripledes", "aes" + """ + + encryption_algo = self['algorithm'].native + + if encryption_algo[0:7] in set(['aes128_', 'aes192_', 'aes256_']): + return 'aes' + + if encryption_algo in set(['des', 'rc2', 'rc5']): + return encryption_algo + + if encryption_algo == 'tripledes_3key': + return 'tripledes' + + if encryption_algo == 'pbes2': + return self['parameters']['encryption_scheme'].encryption_cipher + + if encryption_algo.find('.') == -1: + return { + 'pbes1_md2_des': 'des', + 'pbes1_md5_des': 'des', + 'pbes1_md2_rc2': 'rc2', + 'pbes1_md5_rc2': 'rc2', + 'pbes1_sha1_des': 'des', + 'pbes1_sha1_rc2': 'rc2', + 'pkcs12_sha1_rc4_128': 'rc4', + 'pkcs12_sha1_rc4_40': 'rc4', + 'pkcs12_sha1_tripledes_3key': 'tripledes', + 'pkcs12_sha1_tripledes_2key': 'tripledes', + 'pkcs12_sha1_rc2_128': 'rc2', + 'pkcs12_sha1_rc2_40': 'rc2', + }[encryption_algo] + + raise ValueError(unwrap( + ''' + Unrecognized encryption algorithm "%s" + ''', + encryption_algo + )) + + @property + def encryption_block_size(self): + """ + Returns the block size of the encryption cipher, in bytes. + + :return: + An integer that is the block size in bytes + """ + + encryption_algo = self['algorithm'].native + + if encryption_algo[0:7] in set(['aes128_', 'aes192_', 'aes256_']): + return 16 + + cipher_map = { + 'des': 8, + 'tripledes_3key': 8, + 'rc2': 8, + } + if encryption_algo in cipher_map: + return cipher_map[encryption_algo] + + if encryption_algo == 'rc5': + return self['parameters'].parsed['block_size_in_bits'].native / 8 + + if encryption_algo == 'pbes2': + return self['parameters']['encryption_scheme'].encryption_block_size + + if encryption_algo.find('.') == -1: + return { + 'pbes1_md2_des': 8, + 'pbes1_md5_des': 8, + 'pbes1_md2_rc2': 8, + 'pbes1_md5_rc2': 8, + 'pbes1_sha1_des': 8, + 'pbes1_sha1_rc2': 8, + 'pkcs12_sha1_rc4_128': 0, + 'pkcs12_sha1_rc4_40': 0, + 'pkcs12_sha1_tripledes_3key': 8, + 'pkcs12_sha1_tripledes_2key': 8, + 'pkcs12_sha1_rc2_128': 8, + 'pkcs12_sha1_rc2_40': 8, + }[encryption_algo] + + raise ValueError(unwrap( + ''' + Unrecognized encryption algorithm "%s" + ''', + encryption_algo + )) + + @property + def encryption_iv(self): + """ + Returns the byte string of the initialization vector for the encryption + scheme. Only the PBES2 stores the IV in the params. For PBES1, the IV + is derived from the KDF and this property will return None. + + :return: + A byte string or None + """ + + encryption_algo = self['algorithm'].native + + if encryption_algo in set(['rc2', 'rc5']): + return self['parameters'].parsed['iv'].native + + # For DES/Triple DES and AES the IV is the entirety of the parameters + octet_string_iv_oids = set([ + 'des', + 'tripledes_3key', + 'aes128_cbc', + 'aes192_cbc', + 'aes256_cbc', + 'aes128_ofb', + 'aes192_ofb', + 'aes256_ofb', + ]) + if encryption_algo in octet_string_iv_oids: + return self['parameters'].native + + if encryption_algo == 'pbes2': + return self['parameters']['encryption_scheme'].encryption_iv + + # All of the PBES1 algos use their KDF to create the IV. For the pbkdf1, + # the KDF is told to generate a key that is an extra 8 bytes long, and + # that is used for the IV. For the PKCS#12 KDF, it is called with an id + # of 2 to generate the IV. In either case, we can't return the IV + # without knowing the user's password. 
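+        # Editor's note (sketch): for the OCTET STRING cases above the IV is
+        # simply the decoded parameters value, e.g.
+        #     EncryptionAlgorithm({'algorithm': 'aes128_cbc',
+        #                          'parameters': b'\x01' * 16}).encryption_iv
+        # evaluates to b'\x01' * 16.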
+ if encryption_algo.find('.') == -1: + return None + + raise ValueError(unwrap( + ''' + Unrecognized encryption algorithm "%s" + ''', + encryption_algo + )) + + +class Pbes2Params(Sequence): + _fields = [ + ('key_derivation_func', KdfAlgorithm), + ('encryption_scheme', EncryptionAlgorithm), + ] + + +class Pbmac1Params(Sequence): + _fields = [ + ('key_derivation_func', KdfAlgorithm), + ('message_auth_scheme', HmacAlgorithm), + ] + + +class Pkcs5MacId(ObjectIdentifier): + _map = { + '1.2.840.113549.1.5.14': 'pbmac1', + } + + +class Pkcs5MacAlgorithm(Sequence): + _fields = [ + ('algorithm', Pkcs5MacId), + ('parameters', Any), + ] + + _oid_pair = ('algorithm', 'parameters') + _oid_specs = { + 'pbmac1': Pbmac1Params, + } + + +EncryptionAlgorithm._oid_specs['pbes2'] = Pbes2Params + + +class AnyAlgorithmId(ObjectIdentifier): + _map = {} + + def _setup(self): + _map = self.__class__._map + for other_cls in (EncryptionAlgorithmId, SignedDigestAlgorithmId, DigestAlgorithmId): + for oid, name in other_cls._map.items(): + _map[oid] = name + + +class AnyAlgorithmIdentifier(_ForceNullParameters, Sequence): + _fields = [ + ('algorithm', AnyAlgorithmId), + ('parameters', Any, {'optional': True}), + ] + + _oid_pair = ('algorithm', 'parameters') + _oid_specs = {} + + def _setup(self): + Sequence._setup(self) + specs = self.__class__._oid_specs + for other_cls in (EncryptionAlgorithm, SignedDigestAlgorithm): + for oid, spec in other_cls._oid_specs.items(): + specs[oid] = spec diff --git a/venv/lib/python2.7/site-packages/asn1crypto/cms.py b/venv/lib/python2.7/site-packages/asn1crypto/cms.py new file mode 100644 index 0000000..9cad949 --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/cms.py @@ -0,0 +1,932 @@ +# coding: utf-8 + +""" +ASN.1 type classes for cryptographic message syntax (CMS). Structures are also +compatible with PKCS#7. Exports the following items: + + - AuthenticatedData() + - AuthEnvelopedData() + - CompressedData() + - ContentInfo() + - DigestedData() + - EncryptedData() + - EnvelopedData() + - SignedAndEnvelopedData() + - SignedData() + +Other type classes are defined that help compose the types listed above. + +Most CMS structures in the wild are formatted as ContentInfo encapsulating one of the other types. 
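+
+A typical entry point (editor's sketch; ``der_bytes`` stands in for a
+DER-encoded CMS blob):
+
+    info = ContentInfo.load(der_bytes)
+    if info['content_type'].native == 'signed_data':
+        signed_data = info['content']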
+""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +try: + import zlib +except (ImportError): + zlib = None + +from .algos import ( + _ForceNullParameters, + DigestAlgorithm, + EncryptionAlgorithm, + HmacAlgorithm, + KdfAlgorithm, + SignedDigestAlgorithm, +) +from .core import ( + Any, + BitString, + Choice, + Enumerated, + GeneralizedTime, + Integer, + ObjectIdentifier, + OctetBitString, + OctetString, + ParsableOctetString, + Sequence, + SequenceOf, + SetOf, + UTCTime, + UTF8String, +) +from .crl import CertificateList +from .keys import PublicKeyInfo +from .ocsp import OCSPResponse +from .x509 import Attributes, Certificate, Extensions, GeneralName, GeneralNames, Name + + +# These structures are taken from +# ftp://ftp.rsasecurity.com/pub/pkcs/ascii/pkcs-6.asc + +class ExtendedCertificateInfo(Sequence): + _fields = [ + ('version', Integer), + ('certificate', Certificate), + ('attributes', Attributes), + ] + + +class ExtendedCertificate(Sequence): + _fields = [ + ('extended_certificate_info', ExtendedCertificateInfo), + ('signature_algorithm', SignedDigestAlgorithm), + ('signature', OctetBitString), + ] + + +# These structures are taken from https://tools.ietf.org/html/rfc5652, +# https://tools.ietf.org/html/rfc5083, http://tools.ietf.org/html/rfc2315, +# https://tools.ietf.org/html/rfc5940, https://tools.ietf.org/html/rfc3274, +# https://tools.ietf.org/html/rfc3281 + + +class CMSVersion(Integer): + _map = { + 0: 'v0', + 1: 'v1', + 2: 'v2', + 3: 'v3', + 4: 'v4', + 5: 'v5', + } + + +class CMSAttributeType(ObjectIdentifier): + _map = { + '1.2.840.113549.1.9.3': 'content_type', + '1.2.840.113549.1.9.4': 'message_digest', + '1.2.840.113549.1.9.5': 'signing_time', + '1.2.840.113549.1.9.6': 'counter_signature', + # https://tools.ietf.org/html/rfc3161#page-20 + '1.2.840.113549.1.9.16.2.14': 'signature_time_stamp_token', + # https://tools.ietf.org/html/rfc6211#page-5 + '1.2.840.113549.1.9.52': 'cms_algorithm_protection', + } + + +class Time(Choice): + _alternatives = [ + ('utc_time', UTCTime), + ('generalized_time', GeneralizedTime), + ] + + +class ContentType(ObjectIdentifier): + _map = { + '1.2.840.113549.1.7.1': 'data', + '1.2.840.113549.1.7.2': 'signed_data', + '1.2.840.113549.1.7.3': 'enveloped_data', + '1.2.840.113549.1.7.4': 'signed_and_enveloped_data', + '1.2.840.113549.1.7.5': 'digested_data', + '1.2.840.113549.1.7.6': 'encrypted_data', + '1.2.840.113549.1.9.16.1.2': 'authenticated_data', + '1.2.840.113549.1.9.16.1.9': 'compressed_data', + '1.2.840.113549.1.9.16.1.23': 'authenticated_enveloped_data', + } + + +class CMSAlgorithmProtection(Sequence): + _fields = [ + ('digest_algorithm', DigestAlgorithm), + ('signature_algorithm', SignedDigestAlgorithm, {'implicit': 1, 'optional': True}), + ('mac_algorithm', HmacAlgorithm, {'implicit': 2, 'optional': True}), + ] + + +class SetOfContentType(SetOf): + _child_spec = ContentType + + +class SetOfOctetString(SetOf): + _child_spec = OctetString + + +class SetOfTime(SetOf): + _child_spec = Time + + +class SetOfAny(SetOf): + _child_spec = Any + + +class SetOfCMSAlgorithmProtection(SetOf): + _child_spec = CMSAlgorithmProtection + + +class CMSAttribute(Sequence): + _fields = [ + ('type', CMSAttributeType), + ('values', None), + ] + + _oid_specs = {} + + def _values_spec(self): + return self._oid_specs.get(self['type'].native, SetOfAny) + + _spec_callbacks = { + 'values': _values_spec + } + + +class CMSAttributes(SetOf): + _child_spec = CMSAttribute + + +class IssuerSerial(Sequence): + _fields = [ + ('issuer', 
GeneralNames), + ('serial', Integer), + ('issuer_uid', OctetBitString, {'optional': True}), + ] + + +class AttCertVersion(Integer): + _map = { + 0: 'v1', + 1: 'v2', + } + + +class AttCertSubject(Choice): + _alternatives = [ + ('base_certificate_id', IssuerSerial, {'explicit': 0}), + ('subject_name', GeneralNames, {'explicit': 1}), + ] + + +class AttCertValidityPeriod(Sequence): + _fields = [ + ('not_before_time', GeneralizedTime), + ('not_after_time', GeneralizedTime), + ] + + +class AttributeCertificateInfoV1(Sequence): + _fields = [ + ('version', AttCertVersion, {'default': 'v1'}), + ('subject', AttCertSubject), + ('issuer', GeneralNames), + ('signature', SignedDigestAlgorithm), + ('serial_number', Integer), + ('att_cert_validity_period', AttCertValidityPeriod), + ('attributes', Attributes), + ('issuer_unique_id', OctetBitString, {'optional': True}), + ('extensions', Extensions, {'optional': True}), + ] + + +class AttributeCertificateV1(Sequence): + _fields = [ + ('ac_info', AttributeCertificateInfoV1), + ('signature_algorithm', SignedDigestAlgorithm), + ('signature', OctetBitString), + ] + + +class DigestedObjectType(Enumerated): + _map = { + 0: 'public_key', + 1: 'public_key_cert', + 2: 'other_objy_types', + } + + +class ObjectDigestInfo(Sequence): + _fields = [ + ('digested_object_type', DigestedObjectType), + ('other_object_type_id', ObjectIdentifier, {'optional': True}), + ('digest_algorithm', DigestAlgorithm), + ('object_digest', OctetBitString), + ] + + +class Holder(Sequence): + _fields = [ + ('base_certificate_id', IssuerSerial, {'implicit': 0, 'optional': True}), + ('entity_name', GeneralNames, {'implicit': 1, 'optional': True}), + ('object_digest_info', ObjectDigestInfo, {'implicit': 2, 'optional': True}), + ] + + +class V2Form(Sequence): + _fields = [ + ('issuer_name', GeneralNames, {'optional': True}), + ('base_certificate_id', IssuerSerial, {'explicit': 0, 'optional': True}), + ('object_digest_info', ObjectDigestInfo, {'explicit': 1, 'optional': True}), + ] + + +class AttCertIssuer(Choice): + _alternatives = [ + ('v1_form', GeneralNames), + ('v2_form', V2Form, {'explicit': 0}), + ] + + +class IetfAttrValue(Choice): + _alternatives = [ + ('octets', OctetString), + ('oid', ObjectIdentifier), + ('string', UTF8String), + ] + + +class IetfAttrValues(SequenceOf): + _child_spec = IetfAttrValue + + +class IetfAttrSyntax(Sequence): + _fields = [ + ('policy_authority', GeneralNames, {'implicit': 0, 'optional': True}), + ('values', IetfAttrValues), + ] + + +class SetOfIetfAttrSyntax(SetOf): + _child_spec = IetfAttrSyntax + + +class SvceAuthInfo(Sequence): + _fields = [ + ('service', GeneralName), + ('ident', GeneralName), + ('auth_info', OctetString, {'optional': True}), + ] + + +class SetOfSvceAuthInfo(SetOf): + _child_spec = SvceAuthInfo + + +class RoleSyntax(Sequence): + _fields = [ + ('role_authority', GeneralNames, {'implicit': 0, 'optional': True}), + ('role_name', GeneralName, {'implicit': 1}), + ] + + +class SetOfRoleSyntax(SetOf): + _child_spec = RoleSyntax + + +class ClassList(BitString): + _map = { + 0: 'unmarked', + 1: 'unclassified', + 2: 'restricted', + 3: 'confidential', + 4: 'secret', + 5: 'top_secret', + } + + +class SecurityCategory(Sequence): + _fields = [ + ('type', ObjectIdentifier, {'implicit': 0}), + ('value', Any, {'implicit': 1}), + ] + + +class SetOfSecurityCategory(SetOf): + _child_spec = SecurityCategory + + +class Clearance(Sequence): + _fields = [ + ('policy_id', ObjectIdentifier, {'implicit': 0}), + ('class_list', ClassList, {'implicit': 1, 'default': 
'unclassified'}), + ('security_categories', SetOfSecurityCategory, {'implicit': 2, 'optional': True}), + ] + + +class SetOfClearance(SetOf): + _child_spec = Clearance + + +class BigTime(Sequence): + _fields = [ + ('major', Integer), + ('fractional_seconds', Integer), + ('sign', Integer, {'optional': True}), + ] + + +class LeapData(Sequence): + _fields = [ + ('leap_time', BigTime), + ('action', Integer), + ] + + +class SetOfLeapData(SetOf): + _child_spec = LeapData + + +class TimingMetrics(Sequence): + _fields = [ + ('ntp_time', BigTime), + ('offset', BigTime), + ('delay', BigTime), + ('expiration', BigTime), + ('leap_event', SetOfLeapData, {'optional': True}), + ] + + +class SetOfTimingMetrics(SetOf): + _child_spec = TimingMetrics + + +class TimingPolicy(Sequence): + _fields = [ + ('policy_id', SequenceOf, {'spec': ObjectIdentifier}), + ('max_offset', BigTime, {'explicit': 0, 'optional': True}), + ('max_delay', BigTime, {'explicit': 1, 'optional': True}), + ] + + +class SetOfTimingPolicy(SetOf): + _child_spec = TimingPolicy + + +class AttCertAttributeType(ObjectIdentifier): + _map = { + '1.3.6.1.5.5.7.10.1': 'authentication_info', + '1.3.6.1.5.5.7.10.2': 'access_identity', + '1.3.6.1.5.5.7.10.3': 'charging_identity', + '1.3.6.1.5.5.7.10.4': 'group', + '2.5.4.72': 'role', + '2.5.4.55': 'clearance', + '1.3.6.1.4.1.601.10.4.1': 'timing_metrics', + '1.3.6.1.4.1.601.10.4.2': 'timing_policy', + } + + +class AttCertAttribute(Sequence): + _fields = [ + ('type', AttCertAttributeType), + ('values', None), + ] + + _oid_specs = { + 'authentication_info': SetOfSvceAuthInfo, + 'access_identity': SetOfSvceAuthInfo, + 'charging_identity': SetOfIetfAttrSyntax, + 'group': SetOfIetfAttrSyntax, + 'role': SetOfRoleSyntax, + 'clearance': SetOfClearance, + 'timing_metrics': SetOfTimingMetrics, + 'timing_policy': SetOfTimingPolicy, + } + + def _values_spec(self): + return self._oid_specs.get(self['type'].native, SetOfAny) + + _spec_callbacks = { + 'values': _values_spec + } + + +class AttCertAttributes(SequenceOf): + _child_spec = AttCertAttribute + + +class AttributeCertificateInfoV2(Sequence): + _fields = [ + ('version', AttCertVersion), + ('holder', Holder), + ('issuer', AttCertIssuer), + ('signature', SignedDigestAlgorithm), + ('serial_number', Integer), + ('att_cert_validity_period', AttCertValidityPeriod), + ('attributes', AttCertAttributes), + ('issuer_unique_id', OctetBitString, {'optional': True}), + ('extensions', Extensions, {'optional': True}), + ] + + +class AttributeCertificateV2(Sequence): + # Handle the situation where a V2 cert is encoded as V1 + _bad_tag = 1 + + _fields = [ + ('ac_info', AttributeCertificateInfoV2), + ('signature_algorithm', SignedDigestAlgorithm), + ('signature', OctetBitString), + ] + + +class OtherCertificateFormat(Sequence): + _fields = [ + ('other_cert_format', ObjectIdentifier), + ('other_cert', Any), + ] + + +class CertificateChoices(Choice): + _alternatives = [ + ('certificate', Certificate), + ('extended_certificate', ExtendedCertificate, {'implicit': 0}), + ('v1_attr_cert', AttributeCertificateV1, {'implicit': 1}), + ('v2_attr_cert', AttributeCertificateV2, {'implicit': 2}), + ('other', OtherCertificateFormat, {'implicit': 3}), + ] + + def validate(self, class_, tag, contents): + """ + Ensures that the class and tag specified exist as an alternative. 
This + custom version fixes parsing broken encodings there a V2 attribute + # certificate is encoded as a V1 + + :param class_: + The integer class_ from the encoded value header + + :param tag: + The integer tag from the encoded value header + + :param contents: + A byte string of the contents of the value - used when the object + is explicitly tagged + + :raises: + ValueError - when value is not a valid alternative + """ + + super(CertificateChoices, self).validate(class_, tag, contents) + if self._choice == 2: + if AttCertVersion.load(Sequence.load(contents)[0].dump()).native == 'v2': + self._choice = 3 + + +class CertificateSet(SetOf): + _child_spec = CertificateChoices + + +class ContentInfo(Sequence): + _fields = [ + ('content_type', ContentType), + ('content', Any, {'explicit': 0, 'optional': True}), + ] + + _oid_pair = ('content_type', 'content') + _oid_specs = {} + + +class SetOfContentInfo(SetOf): + _child_spec = ContentInfo + + +class EncapsulatedContentInfo(Sequence): + _fields = [ + ('content_type', ContentType), + ('content', ParsableOctetString, {'explicit': 0, 'optional': True}), + ] + + _oid_pair = ('content_type', 'content') + _oid_specs = {} + + +class IssuerAndSerialNumber(Sequence): + _fields = [ + ('issuer', Name), + ('serial_number', Integer), + ] + + +class SignerIdentifier(Choice): + _alternatives = [ + ('issuer_and_serial_number', IssuerAndSerialNumber), + ('subject_key_identifier', OctetString, {'implicit': 0}), + ] + + +class DigestAlgorithms(SetOf): + _child_spec = DigestAlgorithm + + +class CertificateRevocationLists(SetOf): + _child_spec = CertificateList + + +class SCVPReqRes(Sequence): + _fields = [ + ('request', ContentInfo, {'explicit': 0, 'optional': True}), + ('response', ContentInfo), + ] + + +class OtherRevInfoFormatId(ObjectIdentifier): + _map = { + '1.3.6.1.5.5.7.16.2': 'ocsp_response', + '1.3.6.1.5.5.7.16.4': 'scvp', + } + + +class OtherRevocationInfoFormat(Sequence): + _fields = [ + ('other_rev_info_format', OtherRevInfoFormatId), + ('other_rev_info', Any), + ] + + _oid_pair = ('other_rev_info_format', 'other_rev_info') + _oid_specs = { + 'ocsp_response': OCSPResponse, + 'scvp': SCVPReqRes, + } + + +class RevocationInfoChoice(Choice): + _alternatives = [ + ('crl', CertificateList), + ('other', OtherRevocationInfoFormat, {'implicit': 1}), + ] + + +class RevocationInfoChoices(SetOf): + _child_spec = RevocationInfoChoice + + +class SignerInfo(Sequence): + _fields = [ + ('version', CMSVersion), + ('sid', SignerIdentifier), + ('digest_algorithm', DigestAlgorithm), + ('signed_attrs', CMSAttributes, {'implicit': 0, 'optional': True}), + ('signature_algorithm', SignedDigestAlgorithm), + ('signature', OctetString), + ('unsigned_attrs', CMSAttributes, {'implicit': 1, 'optional': True}), + ] + + +class SignerInfos(SetOf): + _child_spec = SignerInfo + + +class SignedData(Sequence): + _fields = [ + ('version', CMSVersion), + ('digest_algorithms', DigestAlgorithms), + ('encap_content_info', None), + ('certificates', CertificateSet, {'implicit': 0, 'optional': True}), + ('crls', RevocationInfoChoices, {'implicit': 1, 'optional': True}), + ('signer_infos', SignerInfos), + ] + + def _encap_content_info_spec(self): + # If the encap_content_info is version v1, then this could be a PKCS#7 + # structure, or a CMS structure. CMS wraps the encoded value in an + # Octet String tag. 
+ + # If the version is greater than 1, it is definite CMS + if self['version'].native != 'v1': + return EncapsulatedContentInfo + + # Otherwise, the ContentInfo spec from PKCS#7 will be compatible with + # CMS v1 (which only allows Data, an Octet String) and PKCS#7, which + # allows Any + return ContentInfo + + _spec_callbacks = { + 'encap_content_info': _encap_content_info_spec + } + + +class OriginatorInfo(Sequence): + _fields = [ + ('certs', CertificateSet, {'implicit': 0, 'optional': True}), + ('crls', RevocationInfoChoices, {'implicit': 1, 'optional': True}), + ] + + +class RecipientIdentifier(Choice): + _alternatives = [ + ('issuer_and_serial_number', IssuerAndSerialNumber), + ('subject_key_identifier', OctetString, {'implicit': 0}), + ] + + +class KeyEncryptionAlgorithmId(ObjectIdentifier): + _map = { + '1.2.840.113549.1.1.1': 'rsa', + '2.16.840.1.101.3.4.1.5': 'aes128_wrap', + '2.16.840.1.101.3.4.1.8': 'aes128_wrap_pad', + '2.16.840.1.101.3.4.1.25': 'aes192_wrap', + '2.16.840.1.101.3.4.1.28': 'aes192_wrap_pad', + '2.16.840.1.101.3.4.1.45': 'aes256_wrap', + '2.16.840.1.101.3.4.1.48': 'aes256_wrap_pad', + } + + +class KeyEncryptionAlgorithm(_ForceNullParameters, Sequence): + _fields = [ + ('algorithm', KeyEncryptionAlgorithmId), + ('parameters', Any, {'optional': True}), + ] + + +class KeyTransRecipientInfo(Sequence): + _fields = [ + ('version', CMSVersion), + ('rid', RecipientIdentifier), + ('key_encryption_algorithm', KeyEncryptionAlgorithm), + ('encrypted_key', OctetString), + ] + + +class OriginatorIdentifierOrKey(Choice): + _alternatives = [ + ('issuer_and_serial_number', IssuerAndSerialNumber), + ('subject_key_identifier', OctetString, {'implicit': 0}), + ('originator_key', PublicKeyInfo, {'implicit': 1}), + ] + + +class OtherKeyAttribute(Sequence): + _fields = [ + ('key_attr_id', ObjectIdentifier), + ('key_attr', Any), + ] + + +class RecipientKeyIdentifier(Sequence): + _fields = [ + ('subject_key_identifier', OctetString), + ('date', GeneralizedTime, {'optional': True}), + ('other', OtherKeyAttribute, {'optional': True}), + ] + + +class KeyAgreementRecipientIdentifier(Choice): + _alternatives = [ + ('issuer_and_serial_number', IssuerAndSerialNumber), + ('r_key_id', RecipientKeyIdentifier, {'implicit': 0}), + ] + + +class RecipientEncryptedKey(Sequence): + _fields = [ + ('rid', KeyAgreementRecipientIdentifier), + ('encrypted_key', OctetString), + ] + + +class RecipientEncryptedKeys(SequenceOf): + _child_spec = RecipientEncryptedKey + + +class KeyAgreeRecipientInfo(Sequence): + _fields = [ + ('version', CMSVersion), + ('originator', OriginatorIdentifierOrKey, {'explicit': 0}), + ('ukm', OctetString, {'explicit': 1, 'optional': True}), + ('key_encryption_algorithm', KeyEncryptionAlgorithm), + ('recipient_encrypted_keys', RecipientEncryptedKeys), + ] + + +class KEKIdentifier(Sequence): + _fields = [ + ('key_identifier', OctetString), + ('date', GeneralizedTime, {'optional': True}), + ('other', OtherKeyAttribute, {'optional': True}), + ] + + +class KEKRecipientInfo(Sequence): + _fields = [ + ('version', CMSVersion), + ('kekid', KEKIdentifier), + ('key_encryption_algorithm', KeyEncryptionAlgorithm), + ('encrypted_key', OctetString), + ] + + +class PasswordRecipientInfo(Sequence): + _fields = [ + ('version', CMSVersion), + ('key_derivation_algorithm', KdfAlgorithm, {'implicit': 0, 'optional': True}), + ('key_encryption_algorithm', KeyEncryptionAlgorithm), + ('encrypted_key', OctetString), + ] + + +class OtherRecipientInfo(Sequence): + _fields = [ + ('ori_type', ObjectIdentifier), + 
('ori_value', Any), + ] + + +class RecipientInfo(Choice): + _alternatives = [ + ('ktri', KeyTransRecipientInfo), + ('kari', KeyAgreeRecipientInfo, {'implicit': 1}), + ('kekri', KEKRecipientInfo, {'implicit': 2}), + ('pwri', PasswordRecipientInfo, {'implicit': 3}), + ('ori', OtherRecipientInfo, {'implicit': 4}), + ] + + +class RecipientInfos(SetOf): + _child_spec = RecipientInfo + + +class EncryptedContentInfo(Sequence): + _fields = [ + ('content_type', ContentType), + ('content_encryption_algorithm', EncryptionAlgorithm), + ('encrypted_content', OctetString, {'implicit': 0, 'optional': True}), + ] + + +class EnvelopedData(Sequence): + _fields = [ + ('version', CMSVersion), + ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}), + ('recipient_infos', RecipientInfos), + ('encrypted_content_info', EncryptedContentInfo), + ('unprotected_attrs', CMSAttributes, {'implicit': 1, 'optional': True}), + ] + + +class SignedAndEnvelopedData(Sequence): + _fields = [ + ('version', CMSVersion), + ('recipient_infos', RecipientInfos), + ('digest_algorithms', DigestAlgorithms), + ('encrypted_content_info', EncryptedContentInfo), + ('certificates', CertificateSet, {'implicit': 0, 'optional': True}), + ('crls', CertificateRevocationLists, {'implicit': 1, 'optional': True}), + ('signer_infos', SignerInfos), + ] + + +class DigestedData(Sequence): + _fields = [ + ('version', CMSVersion), + ('digest_algorithm', DigestAlgorithm), + ('encap_content_info', None), + ('digest', OctetString), + ] + + def _encap_content_info_spec(self): + # If the encap_content_info is version v1, then this could be a PKCS#7 + # structure, or a CMS structure. CMS wraps the encoded value in an + # Octet String tag. + + # If the version is greater than 1, it is definite CMS + if self['version'].native != 'v1': + return EncapsulatedContentInfo + + # Otherwise, the ContentInfo spec from PKCS#7 will be compatible with + # CMS v1 (which only allows Data, an Octet String) and PKCS#7, which + # allows Any + return ContentInfo + + _spec_callbacks = { + 'encap_content_info': _encap_content_info_spec + } + + +class EncryptedData(Sequence): + _fields = [ + ('version', CMSVersion), + ('encrypted_content_info', EncryptedContentInfo), + ('unprotected_attrs', CMSAttributes, {'implicit': 1, 'optional': True}), + ] + + +class AuthenticatedData(Sequence): + _fields = [ + ('version', CMSVersion), + ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}), + ('recipient_infos', RecipientInfos), + ('mac_algorithm', HmacAlgorithm), + ('digest_algorithm', DigestAlgorithm, {'implicit': 1, 'optional': True}), + # This does not require the _spec_callbacks approach of SignedData and + # DigestedData since AuthenticatedData was not part of PKCS#7 + ('encap_content_info', EncapsulatedContentInfo), + ('auth_attrs', CMSAttributes, {'implicit': 2, 'optional': True}), + ('mac', OctetString), + ('unauth_attrs', CMSAttributes, {'implicit': 3, 'optional': True}), + ] + + +class AuthEnvelopedData(Sequence): + _fields = [ + ('version', CMSVersion), + ('originator_info', OriginatorInfo, {'implicit': 0, 'optional': True}), + ('recipient_infos', RecipientInfos), + ('auth_encrypted_content_info', EncryptedContentInfo), + ('auth_attrs', CMSAttributes, {'implicit': 1, 'optional': True}), + ('mac', OctetString), + ('unauth_attrs', CMSAttributes, {'implicit': 2, 'optional': True}), + ] + + +class CompressionAlgorithmId(ObjectIdentifier): + _map = { + '1.2.840.113549.1.9.16.3.8': 'zlib', + } + + +class CompressionAlgorithm(Sequence): + _fields = [ + 
('algorithm', CompressionAlgorithmId), + ('parameters', Any, {'optional': True}), + ] + + +class CompressedData(Sequence): + _fields = [ + ('version', CMSVersion), + ('compression_algorithm', CompressionAlgorithm), + ('encap_content_info', EncapsulatedContentInfo), + ] + + _decompressed = None + + @property + def decompressed(self): + if self._decompressed is None: + if zlib is None: + raise SystemError('The zlib module is not available') + self._decompressed = zlib.decompress(self['encap_content_info']['content'].native) + return self._decompressed + + +ContentInfo._oid_specs = { + 'data': OctetString, + 'signed_data': SignedData, + 'enveloped_data': EnvelopedData, + 'signed_and_enveloped_data': SignedAndEnvelopedData, + 'digested_data': DigestedData, + 'encrypted_data': EncryptedData, + 'authenticated_data': AuthenticatedData, + 'compressed_data': CompressedData, + 'authenticated_enveloped_data': AuthEnvelopedData, +} + + +EncapsulatedContentInfo._oid_specs = { + 'signed_data': SignedData, + 'enveloped_data': EnvelopedData, + 'signed_and_enveloped_data': SignedAndEnvelopedData, + 'digested_data': DigestedData, + 'encrypted_data': EncryptedData, + 'authenticated_data': AuthenticatedData, + 'compressed_data': CompressedData, + 'authenticated_enveloped_data': AuthEnvelopedData, +} + + +CMSAttribute._oid_specs = { + 'content_type': SetOfContentType, + 'message_digest': SetOfOctetString, + 'signing_time': SetOfTime, + 'counter_signature': SignerInfos, + 'signature_time_stamp_token': SetOfContentInfo, + 'cms_algorithm_protection': SetOfCMSAlgorithmProtection, +} diff --git a/venv/lib/python2.7/site-packages/asn1crypto/core.py b/venv/lib/python2.7/site-packages/asn1crypto/core.py new file mode 100644 index 0000000..14a8203 --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/core.py @@ -0,0 +1,5242 @@ +# coding: utf-8 + +""" +ASN.1 type classes for universal types. Exports the following items: + + - load() + - Any() + - Asn1Value() + - BitString() + - BMPString() + - Boolean() + - CharacterString() + - Choice() + - EmbeddedPdv() + - Enumerated() + - GeneralizedTime() + - GeneralString() + - GraphicString() + - IA5String() + - InstanceOf() + - Integer() + - IntegerBitString() + - IntegerOctetString() + - Null() + - NumericString() + - ObjectDescriptor() + - ObjectIdentifier() + - OctetBitString() + - OctetString() + - PrintableString() + - Real() + - RelativeOid() + - Sequence() + - SequenceOf() + - Set() + - SetOf() + - TeletexString() + - UniversalString() + - UTCTime() + - UTF8String() + - VideotexString() + - VisibleString() + - VOID + - Void() + +Other type classes are defined that help compose the types listed above. +""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +from datetime import datetime, timedelta +import binascii +import copy +import math +import re +import sys + +from . 
import _teletex_codec +from ._errors import unwrap +from ._ordereddict import OrderedDict +from ._types import type_name, str_cls, byte_cls, int_types, chr_cls +from .parser import _parse, _dump_header +from .util import int_to_bytes, int_from_bytes, timezone, extended_datetime + +if sys.version_info <= (3,): + from cStringIO import StringIO as BytesIO + + range = xrange # noqa + _PY2 = True + +else: + from io import BytesIO + + _PY2 = False + + +_teletex_codec.register() + + +CLASS_NUM_TO_NAME_MAP = { + 0: 'universal', + 1: 'application', + 2: 'context', + 3: 'private', +} + +CLASS_NAME_TO_NUM_MAP = { + 'universal': 0, + 'application': 1, + 'context': 2, + 'private': 3, + 0: 0, + 1: 1, + 2: 2, + 3: 3, +} + +METHOD_NUM_TO_NAME_MAP = { + 0: 'primitive', + 1: 'constructed', +} + + +_OID_RE = re.compile(r'^\d+(\.\d+)*$') + + +# A global tracker to ensure that _setup() is called for every class, even +# if is has been called for a parent class. This allows different _fields +# definitions for child classes. Without such a construct, the child classes +# would just see the parent class attributes and would use them. +_SETUP_CLASSES = {} + + +def load(encoded_data, strict=False): + """ + Loads a BER/DER-encoded byte string and construct a universal object based + on the tag value: + + - 1: Boolean + - 2: Integer + - 3: BitString + - 4: OctetString + - 5: Null + - 6: ObjectIdentifier + - 7: ObjectDescriptor + - 8: InstanceOf + - 9: Real + - 10: Enumerated + - 11: EmbeddedPdv + - 12: UTF8String + - 13: RelativeOid + - 16: Sequence, + - 17: Set + - 18: NumericString + - 19: PrintableString + - 20: TeletexString + - 21: VideotexString + - 22: IA5String + - 23: UTCTime + - 24: GeneralizedTime + - 25: GraphicString + - 26: VisibleString + - 27: GeneralString + - 28: UniversalString + - 29: CharacterString + - 30: BMPString + + :param encoded_data: + A byte string of BER or DER-encoded data + + :param strict: + A boolean indicating if trailing data should be forbidden - if so, a + ValueError will be raised when trailing data exists + + :raises: + ValueError - when strict is True and trailing data is present + ValueError - when the encoded value tag a tag other than listed above + ValueError - when the ASN.1 header length is longer than the data + TypeError - when encoded_data is not a byte string + + :return: + An instance of the one of the universal classes + """ + + return Asn1Value.load(encoded_data, strict=strict) + + +class Asn1Value(object): + """ + The basis of all ASN.1 values + """ + + # The integer 0 for primitive, 1 for constructed + method = None + + # An integer 0 through 3 - see CLASS_NUM_TO_NAME_MAP for value + class_ = None + + # An integer 1 or greater indicating the tag number + tag = None + + # An alternate tag allowed for this type - used for handling broken + # structures where a string value is encoded using an incorrect tag + _bad_tag = None + + # If the value has been implicitly tagged + implicit = False + + # If explicitly tagged, a tuple of 2-element tuples containing the + # class int and tag int, from innermost to outermost + explicit = None + + # The BER/DER header bytes + _header = None + + # Raw encoded value bytes not including class, method, tag, length header + contents = None + + # The BER/DER trailer bytes + _trailer = b'' + + # The native python representation of the value - this is not used by + # some classes since they utilize _bytes or _unicode + _native = None + + @classmethod + def load(cls, encoded_data, strict=False, **kwargs): + """ + Loads a 
BER/DER-encoded byte string using the current class as the spec + + :param encoded_data: + A byte string of BER or DER-encoded data + + :param strict: + A boolean indicating if trailing data should be forbidden - if so, a + ValueError will be raised when trailing data exists + + :return: + An instance of the current class + """ + + if not isinstance(encoded_data, byte_cls): + raise TypeError('encoded_data must be a byte string, not %s' % type_name(encoded_data)) + + spec = None + if cls.tag is not None: + spec = cls + + value, _ = _parse_build(encoded_data, spec=spec, spec_params=kwargs, strict=strict) + return value + + def __init__(self, explicit=None, implicit=None, no_explicit=False, tag_type=None, class_=None, tag=None, + optional=None, default=None, contents=None): + """ + The optional parameter is not used, but rather included so we don't + have to delete it from the parameter dictionary when passing as keyword + args + + :param explicit: + An int tag number for explicit tagging, or a 2-element tuple of + class and tag. + + :param implicit: + An int tag number for implicit tagging, or a 2-element tuple of + class and tag. + + :param no_explicit: + If explicit tagging info should be removed from this instance. + Used internally to allow contructing the underlying value that + has been wrapped in an explicit tag. + + :param tag_type: + None for normal values, or one of "implicit", "explicit" for tagged + values. Deprecated in favor of explicit and implicit params. + + :param class_: + The class for the value - defaults to "universal" if tag_type is + None, otherwise defaults to "context". Valid values include: + - "universal" + - "application" + - "context" + - "private" + Deprecated in favor of explicit and implicit params. + + :param tag: + The integer tag to override - usually this is used with tag_type or + class_. Deprecated in favor of explicit and implicit params. 
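In practice these tagging parameters are most often supplied through the spec-params dict of a field definition rather than passed directly. A minimal sketch, assuming the vendored asn1crypto package is importable; the `TaggedPair` structure is invented for illustration:

```python
import binascii

from asn1crypto.core import Integer, Sequence


class TaggedPair(Sequence):  # hypothetical structure, for illustration only
    _fields = [
        ('a', Integer, {'implicit': 0}),                    # [0] IMPLICIT INTEGER
        ('b', Integer, {'explicit': 1, 'optional': True}),  # [1] EXPLICIT INTEGER OPTIONAL
    ]


pair = TaggedPair({'a': 1, 'b': 2})
# The context tags show up as 0x80 (implicit) and 0xa1 (explicit wrapper)
print(binascii.hexlify(pair.dump()))  # 3008800101a103020102
```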
+ + :param optional: + Dummy parameter that allows "optional" key in spec param dicts + + :param default: + The default value to use if the value is currently None + + :param contents: + A byte string of the encoded contents of the value + + :raises: + ValueError - when implicit, explicit, tag_type, class_ or tag are invalid values + """ + + try: + if self.__class__ not in _SETUP_CLASSES: + cls = self.__class__ + # Allow explicit to be specified as a simple 2-element tuple + # instead of requiring the user make a nested tuple + if cls.explicit is not None and isinstance(cls.explicit[0], int_types): + cls.explicit = (cls.explicit, ) + if hasattr(cls, '_setup'): + self._setup() + _SETUP_CLASSES[cls] = True + + # Normalize tagging values + if explicit is not None: + if isinstance(explicit, int_types): + if class_ is None: + class_ = 'context' + explicit = (class_, explicit) + # Prevent both explicit and tag_type == 'explicit' + if tag_type == 'explicit': + tag_type = None + tag = None + + if implicit is not None: + if isinstance(implicit, int_types): + if class_ is None: + class_ = 'context' + implicit = (class_, implicit) + # Prevent both implicit and tag_type == 'implicit' + if tag_type == 'implicit': + tag_type = None + tag = None + + # Convert old tag_type API to explicit/implicit params + if tag_type is not None: + if class_ is None: + class_ = 'context' + if tag_type == 'explicit': + explicit = (class_, tag) + elif tag_type == 'implicit': + implicit = (class_, tag) + else: + raise ValueError(unwrap( + ''' + tag_type must be one of "implicit", "explicit", not %s + ''', + repr(tag_type) + )) + + if explicit is not None: + # Ensure we have a tuple of 2-element tuples + if len(explicit) == 2 and isinstance(explicit[1], int_types): + explicit = (explicit, ) + for class_, tag in explicit: + invalid_class = None + if isinstance(class_, int_types): + if class_ not in CLASS_NUM_TO_NAME_MAP: + invalid_class = class_ + else: + if class_ not in CLASS_NAME_TO_NUM_MAP: + invalid_class = class_ + class_ = CLASS_NAME_TO_NUM_MAP[class_] + if invalid_class is not None: + raise ValueError(unwrap( + ''' + explicit class must be one of "universal", "application", + "context", "private", not %s + ''', + repr(invalid_class) + )) + if tag is not None: + if not isinstance(tag, int_types): + raise TypeError(unwrap( + ''' + explicit tag must be an integer, not %s + ''', + type_name(tag) + )) + if self.explicit is None: + self.explicit = ((class_, tag), ) + else: + self.explicit = self.explicit + ((class_, tag), ) + + elif implicit is not None: + class_, tag = implicit + if class_ not in CLASS_NAME_TO_NUM_MAP: + raise ValueError(unwrap( + ''' + implicit class must be one of "universal", "application", + "context", "private", not %s + ''', + repr(class_) + )) + if tag is not None: + if not isinstance(tag, int_types): + raise TypeError(unwrap( + ''' + implicit tag must be an integer, not %s + ''', + type_name(tag) + )) + self.class_ = CLASS_NAME_TO_NUM_MAP[class_] + self.tag = tag + self.implicit = True + else: + if class_ is not None: + if class_ not in CLASS_NUM_TO_NAME_MAP: + raise ValueError(unwrap( + ''' + class_ must be one of "universal", "application", + "context", "private", not %s + ''', + repr(class_) + )) + self.class_ = CLASS_NAME_TO_NUM_MAP[class_] + + if tag is not None: + self.tag = tag + + if no_explicit: + self.explicit = None + + if contents is not None: + self.contents = contents + + elif default is not None: + self.set(default) + + except (ValueError, TypeError) as e: + args = e.args[1:] + 
e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args + raise e + + def __str__(self): + """ + Since str is different in Python 2 and 3, this calls the appropriate + method, __unicode__() or __bytes__() + + :return: + A unicode string + """ + + if _PY2: + return self.__bytes__() + else: + return self.__unicode__() + + def __repr__(self): + """ + :return: + A unicode string + """ + + if _PY2: + return '<%s %s b%s>' % (type_name(self), id(self), repr(self.dump())) + else: + return '<%s %s %s>' % (type_name(self), id(self), repr(self.dump())) + + def __bytes__(self): + """ + A fall-back method for print() in Python 2 + + :return: + A byte string of the output of repr() + """ + + return self.__repr__().encode('utf-8') + + def __unicode__(self): + """ + A fall-back method for print() in Python 3 + + :return: + A unicode string of the output of repr() + """ + + return self.__repr__() + + def _new_instance(self): + """ + Constructs a new copy of the current object, preserving any tagging + + :return: + An Asn1Value object + """ + + new_obj = self.__class__() + new_obj.class_ = self.class_ + new_obj.tag = self.tag + new_obj.implicit = self.implicit + new_obj.explicit = self.explicit + return new_obj + + def __copy__(self): + """ + Implements the copy.copy() interface + + :return: + A new shallow copy of the current Asn1Value object + """ + + new_obj = self._new_instance() + new_obj._copy(self, copy.copy) + return new_obj + + def __deepcopy__(self, memo): + """ + Implements the copy.deepcopy() interface + + :param memo: + A dict for memoization + + :return: + A new deep copy of the current Asn1Value object + """ + + new_obj = self._new_instance() + memo[id(self)] = new_obj + new_obj._copy(self, copy.deepcopy) + return new_obj + + def copy(self): + """ + Copies the object, preserving any special tagging from it + + :return: + An Asn1Value object + """ + + return copy.deepcopy(self) + + def retag(self, tagging, tag=None): + """ + Copies the object, applying a new tagging to it + + :param tagging: + A dict containing the keys "explicit" and "implicit". Legacy + API allows a unicode string of "implicit" or "explicit". + + :param tag: + A integer tag number. Only used when tagging is a unicode string. 
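For example, retag() hands back a re-tagged copy and leaves the original value untouched; a short sketch using the vendored asn1crypto:

```python
from asn1crypto.core import Integer

plain = Integer(10)                    # universal tag 2
tagged = plain.retag({'implicit': 3})  # context-specific [3] IMPLICIT copy
print(repr(plain.dump()))              # b'\x02\x01\n'
print(repr(tagged.dump()))             # b'\x83\x01\n'
print(tagged.untag().dump() == plain.dump())  # True
```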
+ + :return: + An Asn1Value object + """ + + # This is required to preserve the old API + if not isinstance(tagging, dict): + tagging = {tagging: tag} + new_obj = self.__class__(explicit=tagging.get('explicit'), implicit=tagging.get('implicit')) + new_obj._copy(self, copy.deepcopy) + return new_obj + + def untag(self): + """ + Copies the object, removing any special tagging from it + + :return: + An Asn1Value object + """ + + new_obj = self.__class__() + new_obj._copy(self, copy.deepcopy) + return new_obj + + def _copy(self, other, copy_func): + """ + Copies the contents of another Asn1Value object to itself + + :param object: + Another instance of the same class + + :param copy_func: + An reference of copy.copy() or copy.deepcopy() to use when copying + lists, dicts and objects + """ + + if self.__class__ != other.__class__: + raise TypeError(unwrap( + ''' + Can not copy values from %s object to %s object + ''', + type_name(other), + type_name(self) + )) + + self.contents = other.contents + self._native = copy_func(other._native) + + def debug(self, nest_level=1): + """ + Show the binary data and parsed data in a tree structure + """ + + prefix = ' ' * nest_level + + # This interacts with Any and moves the tag, implicit, explicit, _header, + # contents, _footer to the parsed value so duplicate data isn't present + has_parsed = hasattr(self, 'parsed') + + _basic_debug(prefix, self) + if has_parsed: + self.parsed.debug(nest_level + 2) + elif hasattr(self, 'chosen'): + self.chosen.debug(nest_level + 2) + else: + if _PY2 and isinstance(self.native, byte_cls): + print('%s Native: b%s' % (prefix, repr(self.native))) + else: + print('%s Native: %s' % (prefix, self.native)) + + def dump(self, force=False): + """ + Encodes the value using DER + + :param force: + If the encoded contents already exist, clear them and regenerate + to ensure they are in DER format instead of BER format + + :return: + A byte string of the DER-encoded value + """ + + contents = self.contents + + if self._header is None or force: + if isinstance(self, Constructable) and self._indefinite: + self.method = 0 + + header = _dump_header(self.class_, self.method, self.tag, self.contents) + + if self.explicit is not None: + for class_, tag in self.explicit: + header = _dump_header(class_, 1, tag, header + self.contents) + header + + self._header = header + self._trailer = b'' + + return self._header + contents + + +class ValueMap(): + """ + Basic functionality that allows for mapping values from ints or OIDs to + python unicode strings + """ + + # A dict from primitive value (int or OID) to unicode string. This needs + # to be defined in the source code + _map = None + + # A dict from unicode string to int/OID. This is automatically generated + # from _map the first time it is needed + _reverse_map = None + + def _setup(self): + """ + Generates _reverse_map from _map + """ + + cls = self.__class__ + if cls._map is None or cls._reverse_map is not None: + return + cls._reverse_map = {} + for key, value in cls._map.items(): + cls._reverse_map[value] = key + + +class Castable(object): + """ + A mixin to handle converting an object between different classes that + represent the same encoded value, but with different rules for converting + to and from native Python values + """ + + def cast(self, other_class): + """ + Converts the current object into an object of a different class. The + new class must use the ASN.1 encoding for the value. 
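For instance, OctetString and IntegerOctetString both use tag 4, so a value can be cast between them without re-encoding (a minimal sketch):

```python
from asn1crypto.core import IntegerOctetString, OctetString

raw = OctetString(b'\x01\x00')
as_int = raw.cast(IntegerOctetString)  # same tag (4), different native view
print(as_int.native)                   # 256
print(as_int.dump() == raw.dump())     # True - the encoded bytes are unchanged
```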
+ + :param other_class: + The class to instantiate the new object from + + :return: + An instance of the type other_class + """ + + if other_class.tag != self.__class__.tag: + raise TypeError(unwrap( + ''' + Can not covert a value from %s object to %s object since they + use different tags: %d versus %d + ''', + type_name(other_class), + type_name(self), + other_class.tag, + self.__class__.tag + )) + + new_obj = other_class() + new_obj.class_ = self.class_ + new_obj.implicit = self.implicit + new_obj.explicit = self.explicit + new_obj._header = self._header + new_obj.contents = self.contents + new_obj._trailer = self._trailer + if isinstance(self, Constructable): + new_obj.method = self.method + new_obj._indefinite = self._indefinite + return new_obj + + +class Constructable(object): + """ + A mixin to handle string types that may be constructed from chunks + contained within an indefinite length BER-encoded container + """ + + # Instance attribute indicating if an object was indefinite + # length when parsed - affects parsing and dumping + _indefinite = False + + # Class attribute that indicates the offset into self.contents + # that contains the chunks of data to merge + _chunks_offset = 0 + + def _merge_chunks(self): + """ + :return: + A concatenation of the native values of the contained chunks + """ + + if not self._indefinite: + return self._as_chunk() + + pointer = self._chunks_offset + contents_len = len(self.contents) + output = None + + while pointer < contents_len: + # We pass the current class as the spec so content semantics are preserved + sub_value, pointer = _parse_build(self.contents, pointer, spec=self.__class__) + if output is None: + output = sub_value._merge_chunks() + else: + output += sub_value._merge_chunks() + + if output is None: + return self._as_chunk() + + return output + + def _as_chunk(self): + """ + A method to return a chunk of data that can be combined for + constructed method values + + :return: + A native Python value that can be added together. Examples include + byte strings, unicode strings or tuples. + """ + + if self._chunks_offset == 0: + return self.contents + return self.contents[self._chunks_offset:] + + def _copy(self, other, copy_func): + """ + Copies the contents of another Constructable object to itself + + :param object: + Another instance of the same class + + :param copy_func: + An reference of copy.copy() or copy.deepcopy() to use when copying + lists, dicts and objects + """ + + super(Constructable, self)._copy(other, copy_func) + self.method = other.method + self._indefinite = other._indefinite + + +class Void(Asn1Value): + """ + A representation of an optional value that is not present. Has .native + property and .dump() method to be compatible with other value classes. 
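Absent optional fields are where Void typically appears when reading structures; a small sketch (the `Maybe` structure is invented for illustration):

```python
from asn1crypto.core import Integer, Sequence


class Maybe(Sequence):  # hypothetical structure, for illustration only
    _fields = [
        ('a', Integer),
        ('b', Integer, {'optional': True}),
    ]


val = Maybe({'a': 1})
print(val['b'].native)  # None - the absent optional field reads as a Void
print(val['b'].dump())  # b'' - it contributes nothing to the encoding
```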
+ """ + + contents = b'' + + def __eq__(self, other): + """ + :param other: + The other Primitive to compare to + + :return: + A boolean + """ + + return other.__class__ == self.__class__ + + def __nonzero__(self): + return False + + def __len__(self): + return 0 + + def __iter__(self): + return iter(()) + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + None + """ + + return None + + def dump(self, force=False): + """ + Encodes the value using DER + + :param force: + If the encoded contents already exist, clear them and regenerate + to ensure they are in DER format instead of BER format + + :return: + A byte string of the DER-encoded value + """ + + return b'' + + +VOID = Void() + + +class Any(Asn1Value): + """ + A value class that can contain any value, and allows for easy parsing of + the underlying encoded value using a spec. This is normally contained in + a Structure that has an ObjectIdentifier field and _oid_pair and _oid_specs + defined. + """ + + # The parsed value object + _parsed = None + + def __init__(self, value=None, **kwargs): + """ + Sets the value of the object before passing to Asn1Value.__init__() + + :param value: + An Asn1Value object that will be set as the parsed value + """ + + Asn1Value.__init__(self, **kwargs) + + try: + if value is not None: + if not isinstance(value, Asn1Value): + raise TypeError(unwrap( + ''' + value must be an instance of Asn1Value, not %s + ''', + type_name(value) + )) + + self._parsed = (value, value.__class__, None) + self.contents = value.dump() + + except (ValueError, TypeError) as e: + args = e.args[1:] + e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args + raise e + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + The .native value from the parsed value object + """ + + if self._parsed is None: + self.parse() + + return self._parsed[0].native + + @property + def parsed(self): + """ + Returns the parsed object from .parse() + + :return: + The object returned by .parse() + """ + + if self._parsed is None: + self.parse() + + return self._parsed[0] + + def parse(self, spec=None, spec_params=None): + """ + Parses the contents generically, or using a spec with optional params + + :param spec: + A class derived from Asn1Value that defines what class_ and tag the + value should have, and the semantics of the encoded value. The + return value will be of this type. If omitted, the encoded value + will be decoded using the standard universal tag based on the + encoded tag number. 
+ + :param spec_params: + A dict of params to pass to the spec object + + :return: + An object of the type spec, or if not present, a child of Asn1Value + """ + + if self._parsed is None or self._parsed[1:3] != (spec, spec_params): + try: + passed_params = spec_params or {} + _tag_type_to_explicit_implicit(passed_params) + if self.explicit is not None: + if 'explicit' in passed_params: + passed_params['explicit'] = self.explicit + passed_params['explicit'] + else: + passed_params['explicit'] = self.explicit + contents = self._header + self.contents + self._trailer + parsed_value, _ = _parse_build( + contents, + spec=spec, + spec_params=passed_params + ) + self._parsed = (parsed_value, spec, spec_params) + + # Once we've parsed the Any value, clear any attributes from this object + # since they are now duplicate + self.tag = None + self.explicit = None + self.implicit = False + self._header = b'' + self.contents = contents + self._trailer = b'' + + except (ValueError, TypeError) as e: + args = e.args[1:] + e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args + raise e + return self._parsed[0] + + def _copy(self, other, copy_func): + """ + Copies the contents of another Any object to itself + + :param object: + Another instance of the same class + + :param copy_func: + An reference of copy.copy() or copy.deepcopy() to use when copying + lists, dicts and objects + """ + + super(Any, self)._copy(other, copy_func) + self._parsed = copy_func(other._parsed) + + def dump(self, force=False): + """ + Encodes the value using DER + + :param force: + If the encoded contents already exist, clear them and regenerate + to ensure they are in DER format instead of BER format + + :return: + A byte string of the DER-encoded value + """ + + if self._parsed is None: + self.parse() + + return self._parsed[0].dump(force=force) + + +class Choice(Asn1Value): + """ + A class to handle when a value may be one of several options + """ + + # The index in _alternatives of the validated alternative + _choice = None + + # The name of the chosen alternative + _name = None + + # The Asn1Value object for the chosen alternative + _parsed = None + + # A list of tuples in one of the following forms. 
+ # + # Option 1, a unicode string field name and a value class + # + # ("name", Asn1ValueClass) + # + # Option 2, same as Option 1, but with a dict of class params + # + # ("name", Asn1ValueClass, {'explicit': 5}) + _alternatives = None + + # A dict that maps tuples of (class_, tag) to an index in _alternatives + _id_map = None + + # A dict that maps alternative names to an index in _alternatives + _name_map = None + + @classmethod + def load(cls, encoded_data, strict=False, **kwargs): + """ + Loads a BER/DER-encoded byte string using the current class as the spec + + :param encoded_data: + A byte string of BER or DER encoded data + + :param strict: + A boolean indicating if trailing data should be forbidden - if so, a + ValueError will be raised when trailing data exists + + :return: + A instance of the current class + """ + + if not isinstance(encoded_data, byte_cls): + raise TypeError('encoded_data must be a byte string, not %s' % type_name(encoded_data)) + + value, _ = _parse_build(encoded_data, spec=cls, spec_params=kwargs, strict=strict) + return value + + def _setup(self): + """ + Generates _id_map from _alternatives to allow validating contents + """ + + cls = self.__class__ + cls._id_map = {} + cls._name_map = {} + for index, info in enumerate(cls._alternatives): + if len(info) < 3: + info = info + ({},) + cls._alternatives[index] = info + id_ = _build_id_tuple(info[2], info[1]) + cls._id_map[id_] = index + cls._name_map[info[0]] = index + + def __init__(self, name=None, value=None, **kwargs): + """ + Checks to ensure implicit tagging is not being used since it is + incompatible with Choice, then forwards on to Asn1Value.__init__() + + :param name: + The name of the alternative to be set - used with value. + Alternatively this may be a dict with a single key being the name + and the value being the value, or a two-element tuple of the the + name and the value. 
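A minimal Choice subclass showing the construction forms described above; the `IntOrText` type is invented for illustration, and the implicit tag keeps the two alternatives distinguishable on the wire:

```python
from __future__ import print_function, unicode_literals

from asn1crypto.core import Choice, Integer, UTF8String


class IntOrText(Choice):  # hypothetical, for illustration only
    _alternatives = [
        ('number', Integer),
        ('text', UTF8String, {'implicit': 0}),
    ]


val = IntOrText(name='text', value='hi')  # same as IntOrText({'text': 'hi'})
print(val.name, val.native)               # text hi
print(IntOrText.load(val.dump()).name)    # text
```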
+ + :param value: + The alternative value to set - used with name + + :raises: + ValueError - when implicit param is passed (or legacy tag_type param is "implicit") + """ + + _tag_type_to_explicit_implicit(kwargs) + + Asn1Value.__init__(self, **kwargs) + + try: + if kwargs.get('implicit') is not None: + raise ValueError(unwrap( + ''' + The Choice type can not be implicitly tagged even if in an + implicit module - due to its nature any tagging must be + explicit + ''' + )) + + if name is not None: + if isinstance(name, dict): + if len(name) != 1: + raise ValueError(unwrap( + ''' + When passing a dict as the "name" argument to %s, + it must have a single key/value - however %d were + present + ''', + type_name(self), + len(name) + )) + name, value = list(name.items())[0] + + if isinstance(name, tuple): + if len(name) != 2: + raise ValueError(unwrap( + ''' + When passing a tuple as the "name" argument to %s, + it must have two elements, the name and value - + however %d were present + ''', + type_name(self), + len(name) + )) + value = name[1] + name = name[0] + + if name not in self._name_map: + raise ValueError(unwrap( + ''' + The name specified, "%s", is not a valid alternative + for %s + ''', + name, + type_name(self) + )) + + self._choice = self._name_map[name] + _, spec, params = self._alternatives[self._choice] + + if not isinstance(value, spec): + value = spec(value, **params) + else: + value = _fix_tagging(value, params) + self._parsed = value + + except (ValueError, TypeError) as e: + args = e.args[1:] + e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args + raise e + + @property + def name(self): + """ + :return: + A unicode string of the field name of the chosen alternative + """ + if not self._name: + self._name = self._alternatives[self._choice][0] + return self._name + + def parse(self): + """ + Parses the detected alternative + + :return: + An Asn1Value object of the chosen alternative + """ + + if self._parsed is not None: + return self._parsed + + try: + _, spec, params = self._alternatives[self._choice] + self._parsed, _ = _parse_build(self.contents, spec=spec, spec_params=params) + except (ValueError, TypeError) as e: + args = e.args[1:] + e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args + raise e + + @property + def chosen(self): + """ + :return: + An Asn1Value object of the chosen alternative + """ + + return self.parse() + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + The .native value from the contained value object + """ + + return self.chosen.native + + def validate(self, class_, tag, contents): + """ + Ensures that the class and tag specified exist as an alternative + + :param class_: + The integer class_ from the encoded value header + + :param tag: + The integer tag from the encoded value header + + :param contents: + A byte string of the contents of the value - used when the object + is explicitly tagged + + :raises: + ValueError - when value is not a valid alternative + """ + + id_ = (class_, tag) + + if self.explicit is not None: + if self.explicit[-1] != id_: + raise ValueError(unwrap( + ''' + %s was explicitly tagged, but the value provided does not + match the class and tag + ''', + type_name(self) + )) + + ((class_, _, tag, _, _, _), _) = _parse(contents, len(contents)) + id_ = (class_, tag) + + if id_ in self._id_map: + self._choice = self._id_map[id_] + return + + # This means the Choice was implicitly tagged + if self.class_ is not None and 
self.tag is not None: + if len(self._alternatives) > 1: + raise ValueError(unwrap( + ''' + %s was implicitly tagged, but more than one alternative + exists + ''', + type_name(self) + )) + if id_ == (self.class_, self.tag): + self._choice = 0 + return + + asn1 = self._format_class_tag(class_, tag) + asn1s = [self._format_class_tag(pair[0], pair[1]) for pair in self._id_map] + + raise ValueError(unwrap( + ''' + Value %s did not match the class and tag of any of the alternatives + in %s: %s + ''', + asn1, + type_name(self), + ', '.join(asn1s) + )) + + def _format_class_tag(self, class_, tag): + """ + :return: + A unicode string of a human-friendly representation of the class and tag + """ + + return '[%s %s]' % (CLASS_NUM_TO_NAME_MAP[class_].upper(), tag) + + def _copy(self, other, copy_func): + """ + Copies the contents of another Choice object to itself + + :param object: + Another instance of the same class + + :param copy_func: + An reference of copy.copy() or copy.deepcopy() to use when copying + lists, dicts and objects + """ + + super(Choice, self)._copy(other, copy_func) + self._choice = other._choice + self._name = other._name + self._parsed = copy_func(other._parsed) + + def dump(self, force=False): + """ + Encodes the value using DER + + :param force: + If the encoded contents already exist, clear them and regenerate + to ensure they are in DER format instead of BER format + + :return: + A byte string of the DER-encoded value + """ + + self.contents = self.chosen.dump(force=force) + if self._header is None or force: + self._header = b'' + if self.explicit is not None: + for class_, tag in self.explicit: + self._header = _dump_header(class_, 1, tag, self._header + self.contents) + self._header + return self._header + self.contents + + +class Concat(object): + """ + A class that contains two or more encoded child values concatentated + together. THIS IS NOT PART OF THE ASN.1 SPECIFICATION! This exists to handle + the x509.TrustedCertificate() class for OpenSSL certificates containing + extra information. 
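A sketch of a Concat subclass parsing two back-to-back DER values; the `IntThenBytes` type is invented for illustration, x509.TrustedCertificate being the real in-library user:

```python
from __future__ import print_function

from asn1crypto.core import Concat, Integer, OctetString


class IntThenBytes(Concat):  # hypothetical, for illustration only
    _child_specs = [Integer, OctetString]


raw = Integer(7).dump() + OctetString(b'abc').dump()
pair = IntThenBytes.load(raw)
print(pair[0].native, pair[1].native)  # 7 b'abc'
```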
+ """ + + # A list of the specs of the concatenated values + _child_specs = None + + _children = None + + @classmethod + def load(cls, encoded_data, strict=False): + """ + Loads a BER/DER-encoded byte string using the current class as the spec + + :param encoded_data: + A byte string of BER or DER encoded data + + :param strict: + A boolean indicating if trailing data should be forbidden - if so, a + ValueError will be raised when trailing data exists + + :return: + A Concat object + """ + + return cls(contents=encoded_data, strict=strict) + + def __init__(self, value=None, contents=None, strict=False): + """ + :param value: + A native Python datatype to initialize the object value with + + :param contents: + A byte string of the encoded contents of the value + + :param strict: + A boolean indicating if trailing data should be forbidden - if so, a + ValueError will be raised when trailing data exists in contents + + :raises: + ValueError - when an error occurs with one of the children + TypeError - when an error occurs with one of the children + """ + + if contents is not None: + try: + contents_len = len(contents) + self._children = [] + + offset = 0 + for spec in self._child_specs: + if offset < contents_len: + child_value, offset = _parse_build(contents, pointer=offset, spec=spec) + else: + child_value = spec() + self._children.append(child_value) + + if strict and offset != contents_len: + extra_bytes = contents_len - offset + raise ValueError('Extra data - %d bytes of trailing data were provided' % extra_bytes) + + except (ValueError, TypeError) as e: + args = e.args[1:] + e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args + raise e + + if value is not None: + if self._children is None: + self._children = [None] * len(self._child_specs) + for index, data in enumerate(value): + self.__setitem__(index, data) + + def __str__(self): + """ + Since str is different in Python 2 and 3, this calls the appropriate + method, __unicode__() or __bytes__() + + :return: + A unicode string + """ + + if _PY2: + return self.__bytes__() + else: + return self.__unicode__() + + def __bytes__(self): + """ + A byte string of the DER-encoded contents + """ + + return self.dump() + + def __unicode__(self): + """ + :return: + A unicode string + """ + + return repr(self) + + def __repr__(self): + """ + :return: + A unicode string + """ + + return '<%s %s %s>' % (type_name(self), id(self), repr(self.dump())) + + def __copy__(self): + """ + Implements the copy.copy() interface + + :return: + A new shallow copy of the Concat object + """ + + new_obj = self.__class__() + new_obj._copy(self, copy.copy) + return new_obj + + def __deepcopy__(self, memo): + """ + Implements the copy.deepcopy() interface + + :param memo: + A dict for memoization + + :return: + A new deep copy of the Concat object and all child objects + """ + + new_obj = self.__class__() + memo[id(self)] = new_obj + new_obj._copy(self, copy.deepcopy) + return new_obj + + def copy(self): + """ + Copies the object + + :return: + A Concat object + """ + + return copy.deepcopy(self) + + def _copy(self, other, copy_func): + """ + Copies the contents of another Concat object to itself + + :param object: + Another instance of the same class + + :param copy_func: + An reference of copy.copy() or copy.deepcopy() to use when copying + lists, dicts and objects + """ + + if self.__class__ != other.__class__: + raise TypeError(unwrap( + ''' + Can not copy values from %s object to %s object + ''', + type_name(other), + type_name(self) + )) + 
+ self._children = copy_func(other._children) + + def debug(self, nest_level=1): + """ + Show the binary data and parsed data in a tree structure + """ + + prefix = ' ' * nest_level + print('%s%s Object #%s' % (prefix, type_name(self), id(self))) + print('%s Children:' % (prefix,)) + for child in self._children: + child.debug(nest_level + 2) + + def dump(self, force=False): + """ + Encodes the value using DER + + :param force: + If the encoded contents already exist, clear them and regenerate + to ensure they are in DER format instead of BER format + + :return: + A byte string of the DER-encoded value + """ + + contents = b'' + for child in self._children: + contents += child.dump(force=force) + return contents + + @property + def contents(self): + """ + :return: + A byte string of the DER-encoded contents of the children + """ + + return self.dump() + + def __len__(self): + """ + :return: + Integer + """ + + return len(self._children) + + def __getitem__(self, key): + """ + Allows accessing children by index + + :param key: + An integer of the child index + + :raises: + KeyError - when an index is invalid + + :return: + The Asn1Value object of the child specified + """ + + if key > len(self._child_specs) - 1 or key < 0: + raise KeyError(unwrap( + ''' + No child is definition for position %d of %s + ''', + key, + type_name(self) + )) + + return self._children[key] + + def __setitem__(self, key, value): + """ + Allows settings children by index + + :param key: + An integer of the child index + + :param value: + An Asn1Value object to set the child to + + :raises: + KeyError - when an index is invalid + ValueError - when the value is not an instance of Asn1Value + """ + + if key > len(self._child_specs) - 1 or key < 0: + raise KeyError(unwrap( + ''' + No child is defined for position %d of %s + ''', + key, + type_name(self) + )) + + if not isinstance(value, Asn1Value): + raise ValueError(unwrap( + ''' + Value for child %s of %s is not an instance of + asn1crypto.core.Asn1Value + ''', + key, + type_name(self) + )) + + self._children[key] = value + + def __iter__(self): + """ + :return: + An iterator of child values + """ + + return iter(self._children) + + +class Primitive(Asn1Value): + """ + Sets the class_ and method attributes for primitive, universal values + """ + + class_ = 0 + + method = 0 + + def __init__(self, value=None, default=None, contents=None, **kwargs): + """ + Sets the value of the object before passing to Asn1Value.__init__() + + :param value: + A native Python datatype to initialize the object value with + + :param default: + The default value if no value is specified + + :param contents: + A byte string of the encoded contents of the value + """ + + Asn1Value.__init__(self, **kwargs) + + try: + if contents is not None: + self.contents = contents + + elif value is not None: + self.set(value) + + elif default is not None: + self.set(default) + + except (ValueError, TypeError) as e: + args = e.args[1:] + e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args + raise e + + def set(self, value): + """ + Sets the value of the object + + :param value: + A byte string + """ + + if not isinstance(value, byte_cls): + raise TypeError(unwrap( + ''' + %s value must be a byte string, not %s + ''', + type_name(self), + type_name(value) + )) + + self._native = value + self.contents = value + self._header = None + if self._trailer != b'': + self._trailer = b'' + + def dump(self, force=False): + """ + Encodes the value using DER + + :param force: + If the encoded 
contents already exist, clear them and regenerate + to ensure they are in DER format instead of BER format + + :return: + A byte string of the DER-encoded value + """ + + if force: + native = self.native + self.contents = None + self.set(native) + + return Asn1Value.dump(self) + + def __ne__(self, other): + return not self == other + + def __eq__(self, other): + """ + :param other: + The other Primitive to compare to + + :return: + A boolean + """ + + if not isinstance(other, Primitive): + return False + + if self.contents != other.contents: + return False + + # We compare class tag numbers since object tag numbers could be + # different due to implicit or explicit tagging + if self.__class__.tag != other.__class__.tag: + return False + + if self.__class__ == other.__class__ and self.contents == other.contents: + return True + + # If the objects share a common base class that is not too low-level + # then we can compare the contents + self_bases = (set(self.__class__.__bases__) | set([self.__class__])) - set([Asn1Value, Primitive, ValueMap]) + other_bases = (set(other.__class__.__bases__) | set([other.__class__])) - set([Asn1Value, Primitive, ValueMap]) + if self_bases | other_bases: + return self.contents == other.contents + + # When tagging is going on, do the extra work of constructing new + # objects to see if the dumped representation are the same + if self.implicit or self.explicit or other.implicit or other.explicit: + return self.untag().dump() == other.untag().dump() + + return self.dump() == other.dump() + + +class AbstractString(Constructable, Primitive): + """ + A base class for all strings that have a known encoding. In general, we do + not worry ourselves with confirming that the decoded values match a specific + set of characters, only that they are decoded into a Python unicode string + """ + + # The Python encoding name to use when decoding or encoded the contents + _encoding = 'latin1' + + # Instance attribute of (possibly-merged) unicode string + _unicode = None + + def set(self, value): + """ + Sets the value of the string + + :param value: + A unicode string + """ + + if not isinstance(value, str_cls): + raise TypeError(unwrap( + ''' + %s value must be a unicode string, not %s + ''', + type_name(self), + type_name(value) + )) + + self._unicode = value + self.contents = value.encode(self._encoding) + self._header = None + if self._indefinite: + self._indefinite = False + self.method = 0 + if self._trailer != b'': + self._trailer = b'' + + def __unicode__(self): + """ + :return: + A unicode string + """ + + if self.contents is None: + return '' + if self._unicode is None: + self._unicode = self._merge_chunks().decode(self._encoding) + return self._unicode + + def _copy(self, other, copy_func): + """ + Copies the contents of another AbstractString object to itself + + :param object: + Another instance of the same class + + :param copy_func: + An reference of copy.copy() or copy.deepcopy() to use when copying + lists, dicts and objects + """ + + super(AbstractString, self)._copy(other, copy_func) + self._unicode = other._unicode + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + A unicode string or None + """ + + if self.contents is None: + return None + + return self.__unicode__() + + +class Boolean(Primitive): + """ + Represents a boolean in both ASN.1 and Python + """ + + tag = 1 + + def set(self, value): + """ + Sets the value of the object + + :param value: + True, False or another value that works with 
bool() + """ + + self._native = bool(value) + self.contents = b'\x00' if not value else b'\xff' + self._header = None + if self._trailer != b'': + self._trailer = b'' + + # Python 2 + def __nonzero__(self): + """ + :return: + True or False + """ + return self.__bool__() + + def __bool__(self): + """ + :return: + True or False + """ + return self.contents != b'\x00' + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + True, False or None + """ + + if self.contents is None: + return None + + if self._native is None: + self._native = self.__bool__() + return self._native + + +class Integer(Primitive, ValueMap): + """ + Represents an integer in both ASN.1 and Python + """ + + tag = 2 + + def set(self, value): + """ + Sets the value of the object + + :param value: + An integer, or a unicode string if _map is set + + :raises: + ValueError - when an invalid value is passed + """ + + if isinstance(value, str_cls): + if self._map is None: + raise ValueError(unwrap( + ''' + %s value is a unicode string, but no _map provided + ''', + type_name(self) + )) + + if value not in self._reverse_map: + raise ValueError(unwrap( + ''' + %s value, %s, is not present in the _map + ''', + type_name(self), + value + )) + + value = self._reverse_map[value] + + elif not isinstance(value, int_types): + raise TypeError(unwrap( + ''' + %s value must be an integer or unicode string when a name_map + is provided, not %s + ''', + type_name(self), + type_name(value) + )) + + self._native = self._map[value] if self._map and value in self._map else value + + self.contents = int_to_bytes(value, signed=True) + self._header = None + if self._trailer != b'': + self._trailer = b'' + + def __int__(self): + """ + :return: + An integer + """ + return int_from_bytes(self.contents, signed=True) + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + An integer or None + """ + + if self.contents is None: + return None + + if self._native is None: + self._native = self.__int__() + if self._map is not None and self._native in self._map: + self._native = self._map[self._native] + return self._native + + +class BitString(Constructable, Castable, Primitive, ValueMap, object): + """ + Represents a bit string from ASN.1 as a Python tuple of 1s and 0s + """ + + tag = 3 + + _size = None + + # Used with _as_chunk() from Constructable + _chunk = None + _chunks_offset = 1 + + def _setup(self): + """ + Generates _reverse_map from _map + """ + + ValueMap._setup(self) + + cls = self.__class__ + if cls._map is not None: + cls._size = max(self._map.keys()) + 1 + + def set(self, value): + """ + Sets the value of the object + + :param value: + An integer or a tuple of integers 0 and 1 + + :raises: + ValueError - when an invalid value is passed + """ + + if isinstance(value, set): + if self._map is None: + raise ValueError(unwrap( + ''' + %s._map has not been defined + ''', + type_name(self) + )) + + bits = [0] * self._size + self._native = value + for index in range(0, self._size): + key = self._map.get(index) + if key is None: + continue + if key in value: + bits[index] = 1 + + value = ''.join(map(str_cls, bits)) + + elif value.__class__ == tuple: + if self._map is None: + self._native = value + else: + self._native = set() + for index, bit in enumerate(value): + if bit: + name = self._map.get(index, index) + self._native.add(name) + value = ''.join(map(str_cls, value)) + + else: + raise TypeError(unwrap( + ''' + %s value must be a 
tuple of ones and zeros or a set of unicode + strings, not %s + ''', + type_name(self), + type_name(value) + )) + + self._chunk = None + + if self._map is not None: + if len(value) > self._size: + raise ValueError(unwrap( + ''' + %s value must be at most %s bits long, specified was %s long + ''', + type_name(self), + self._size, + len(value) + )) + # A NamedBitList must have trailing zero bit truncated. See + # https://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf + # section 11.2, + # https://tools.ietf.org/html/rfc5280#page-134 and + # https://www.ietf.org/mail-archive/web/pkix/current/msg10443.html + value = value.rstrip('0') + size = len(value) + + size_mod = size % 8 + extra_bits = 0 + if size_mod != 0: + extra_bits = 8 - size_mod + value += '0' * extra_bits + + size_in_bytes = int(math.ceil(size / 8)) + + if extra_bits: + extra_bits_byte = int_to_bytes(extra_bits) + else: + extra_bits_byte = b'\x00' + + if value == '': + value_bytes = b'' + else: + value_bytes = int_to_bytes(int(value, 2)) + if len(value_bytes) != size_in_bytes: + value_bytes = (b'\x00' * (size_in_bytes - len(value_bytes))) + value_bytes + + self.contents = extra_bits_byte + value_bytes + self._header = None + if self._indefinite: + self._indefinite = False + self.method = 0 + if self._trailer != b'': + self._trailer = b'' + + def __getitem__(self, key): + """ + Retrieves a boolean version of one of the bits based on a name from the + _map + + :param key: + The unicode string of one of the bit names + + :raises: + ValueError - when _map is not set or the key name is invalid + + :return: + A boolean if the bit is set + """ + + is_int = isinstance(key, int_types) + if not is_int: + if not isinstance(self._map, dict): + raise ValueError(unwrap( + ''' + %s._map has not been defined + ''', + type_name(self) + )) + + if key not in self._reverse_map: + raise ValueError(unwrap( + ''' + %s._map does not contain an entry for "%s" + ''', + type_name(self), + key + )) + + if self._native is None: + self.native + + if self._map is None: + if len(self._native) >= key + 1: + return bool(self._native[key]) + return False + + if is_int: + key = self._map.get(key, key) + + return key in self._native + + def __setitem__(self, key, value): + """ + Sets one of the bits based on a name from the _map + + :param key: + The unicode string of one of the bit names + + :param value: + A boolean value + + :raises: + ValueError - when _map is not set or the key name is invalid + """ + + is_int = isinstance(key, int_types) + if not is_int: + if self._map is None: + raise ValueError(unwrap( + ''' + %s._map has not been defined + ''', + type_name(self) + )) + + if key not in self._reverse_map: + raise ValueError(unwrap( + ''' + %s._map does not contain an entry for "%s" + ''', + type_name(self), + key + )) + + if self._native is None: + self.native + + if self._map is None: + new_native = list(self._native) + max_key = len(new_native) - 1 + if key > max_key: + new_native.extend([0] * (key - max_key)) + new_native[key] = 1 if value else 0 + self._native = tuple(new_native) + + else: + if is_int: + key = self._map.get(key, key) + + if value: + if key not in self._native: + self._native.add(key) + else: + if key in self._native: + self._native.remove(key) + + self.set(self._native) + + def _as_chunk(self): + """ + Allows reconstructing indefinite length values + + :return: + A tuple of integers + """ + + extra_bits = int_from_bytes(self.contents[0:1]) + bit_string = '{0:b}'.format(int_from_bytes(self.contents[1:])) + byte_len = 
len(self.contents[1:]) + bit_len = len(bit_string) + + # Left-pad the bit string to a byte multiple to ensure we didn't + # lose any zero bits on the left + mod_bit_len = bit_len % 8 + if mod_bit_len != 0: + bit_string = ('0' * (8 - mod_bit_len)) + bit_string + bit_len = len(bit_string) + + if bit_len // 8 < byte_len: + missing_bytes = byte_len - (bit_len // 8) + bit_string = ('0' * (8 * missing_bytes)) + bit_string + + # Trim off the extra bits on the right used to fill the last byte + if extra_bits > 0: + bit_string = bit_string[0:0 - extra_bits] + + return tuple(map(int, tuple(bit_string))) + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + If a _map is set, a set of names, or if no _map is set, a tuple of + integers 1 and 0. None if no value. + """ + + # For BitString we default the value to be all zeros + if self.contents is None: + if self._map is None: + self.set(()) + else: + self.set(set()) + + if self._native is None: + bits = self._merge_chunks() + if self._map: + self._native = set() + for index, bit in enumerate(bits): + if bit: + name = self._map.get(index, index) + self._native.add(name) + else: + self._native = bits + return self._native + + +class OctetBitString(Constructable, Castable, Primitive): + """ + Represents a bit string in ASN.1 as a Python byte string + """ + + tag = 3 + + # Whenever dealing with octet-based bit strings, we really want the + # bytes, so we just ignore the unused bits portion since it isn't + # applicable to the current use case + # unused_bits = struct.unpack('>B', self.contents[0:1])[0] + _chunks_offset = 1 + + # Instance attribute of (possibly-merged) byte string + _bytes = None + + def set(self, value): + """ + Sets the value of the object + + :param value: + A byte string + + :raises: + ValueError - when an invalid value is passed + """ + + if not isinstance(value, byte_cls): + raise TypeError(unwrap( + ''' + %s value must be a byte string, not %s + ''', + type_name(self), + type_name(value) + )) + + self._bytes = value + # Set the unused bits to 0 + self.contents = b'\x00' + value + self._header = None + if self._indefinite: + self._indefinite = False + self.method = 0 + if self._trailer != b'': + self._trailer = b'' + + def __bytes__(self): + """ + :return: + A byte string + """ + + if self.contents is None: + return b'' + if self._bytes is None: + self._bytes = self._merge_chunks() + return self._bytes + + def _copy(self, other, copy_func): + """ + Copies the contents of another OctetBitString object to itself + + :param object: + Another instance of the same class + + :param copy_func: + An reference of copy.copy() or copy.deepcopy() to use when copying + lists, dicts and objects + """ + + super(OctetBitString, self)._copy(other, copy_func) + self._bytes = other._bytes + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + A byte string or None + """ + + if self.contents is None: + return None + + return self.__bytes__() + + +class IntegerBitString(Constructable, Castable, Primitive): + """ + Represents a bit string in ASN.1 as a Python integer + """ + + tag = 3 + + _chunks_offset = 1 + + def set(self, value): + """ + Sets the value of the object + + :param value: + An integer + + :raises: + ValueError - when an invalid value is passed + """ + + if not isinstance(value, int_types): + raise TypeError(unwrap( + ''' + %s value must be an integer, not %s + ''', + type_name(self), + type_name(value) + )) + + self._native 
= value + # Set the unused bits to 0 + self.contents = b'\x00' + int_to_bytes(value, signed=True) + self._header = None + if self._indefinite: + self._indefinite = False + self.method = 0 + if self._trailer != b'': + self._trailer = b'' + + def _as_chunk(self): + """ + Allows reconstructing indefinite length values + + :return: + A unicode string of bits - 1s and 0s + """ + + extra_bits = int_from_bytes(self.contents[0:1]) + bit_string = '{0:b}'.format(int_from_bytes(self.contents[1:])) + + # Ensure we have leading zeros since these chunks may be concatenated together + mod_bit_len = len(bit_string) % 8 + if mod_bit_len != 0: + bit_string = ('0' * (8 - mod_bit_len)) + bit_string + + if extra_bits > 0: + return bit_string[0:0 - extra_bits] + + return bit_string + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + An integer or None + """ + + if self.contents is None: + return None + + if self._native is None: + extra_bits = int_from_bytes(self.contents[0:1]) + # Fast path + if not self._indefinite and extra_bits == 0: + self._native = int_from_bytes(self.contents[1:]) + else: + if self._indefinite and extra_bits > 0: + raise ValueError('Constructed bit string has extra bits on indefinite container') + self._native = int(self._merge_chunks(), 2) + return self._native + + +class OctetString(Constructable, Castable, Primitive): + """ + Represents a byte string in both ASN.1 and Python + """ + + tag = 4 + + # Instance attribute of (possibly-merged) byte string + _bytes = None + + def set(self, value): + """ + Sets the value of the object + + :param value: + A byte string + """ + + if not isinstance(value, byte_cls): + raise TypeError(unwrap( + ''' + %s value must be a byte string, not %s + ''', + type_name(self), + type_name(value) + )) + + self._bytes = value + self.contents = value + self._header = None + if self._indefinite: + self._indefinite = False + self.method = 0 + if self._trailer != b'': + self._trailer = b'' + + def __bytes__(self): + """ + :return: + A byte string + """ + + if self.contents is None: + return b'' + if self._bytes is None: + self._bytes = self._merge_chunks() + return self._bytes + + def _copy(self, other, copy_func): + """ + Copies the contents of another OctetString object to itself + + :param object: + Another instance of the same class + + :param copy_func: + An reference of copy.copy() or copy.deepcopy() to use when copying + lists, dicts and objects + """ + + super(OctetString, self)._copy(other, copy_func) + self._bytes = other._bytes + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + A byte string or None + """ + + if self.contents is None: + return None + + return self.__bytes__() + + +class IntegerOctetString(Constructable, Castable, Primitive): + """ + Represents a byte string in ASN.1 as a Python integer + """ + + tag = 4 + + def set(self, value): + """ + Sets the value of the object + + :param value: + An integer + + :raises: + ValueError - when an invalid value is passed + """ + + if not isinstance(value, int_types): + raise TypeError(unwrap( + ''' + %s value must be an integer, not %s + ''', + type_name(self), + type_name(value) + )) + + self._native = value + self.contents = int_to_bytes(value, signed=False) + self._header = None + if self._indefinite: + self._indefinite = False + self.method = 0 + if self._trailer != b'': + self._trailer = b'' + + @property + def native(self): + """ + The a native Python datatype 
representation of this value + + :return: + An integer or None + """ + + if self.contents is None: + return None + + if self._native is None: + self._native = int_from_bytes(self._merge_chunks()) + return self._native + + +class ParsableOctetString(Constructable, Castable, Primitive): + + tag = 4 + + _parsed = None + + # Instance attribute of (possibly-merged) byte string + _bytes = None + + def __init__(self, value=None, parsed=None, **kwargs): + """ + Allows providing a parsed object that will be serialized to get the + byte string value + + :param value: + A native Python datatype to initialize the object value with + + :param parsed: + If value is None and this is an Asn1Value object, this will be + set as the parsed value, and the value will be obtained by calling + .dump() on this object. + """ + + set_parsed = False + if value is None and parsed is not None and isinstance(parsed, Asn1Value): + value = parsed.dump() + set_parsed = True + + Primitive.__init__(self, value=value, **kwargs) + + if set_parsed: + self._parsed = (parsed, parsed.__class__, None) + + def set(self, value): + """ + Sets the value of the object + + :param value: + A byte string + """ + + if not isinstance(value, byte_cls): + raise TypeError(unwrap( + ''' + %s value must be a byte string, not %s + ''', + type_name(self), + type_name(value) + )) + + self._bytes = value + self.contents = value + self._header = None + if self._indefinite: + self._indefinite = False + self.method = 0 + if self._trailer != b'': + self._trailer = b'' + + def parse(self, spec=None, spec_params=None): + """ + Parses the contents generically, or using a spec with optional params + + :param spec: + A class derived from Asn1Value that defines what class_ and tag the + value should have, and the semantics of the encoded value. The + return value will be of this type. If omitted, the encoded value + will be decoded using the standard universal tag based on the + encoded tag number. 
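A sketch of parsing an OCTET STRING whose payload is itself DER, the pattern used throughout X.509 extensions:

```python
from asn1crypto.core import Integer, ParsableOctetString

wrapper = ParsableOctetString(parsed=Integer(99))
print(wrapper.parse(spec=Integer).native)  # 99
print(wrapper.native)                      # 99 - native defers to the parsed value
```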
+ + :param spec_params: + A dict of params to pass to the spec object + + :return: + An object of the type spec, or if not present, a child of Asn1Value + """ + + if self._parsed is None or self._parsed[1:3] != (spec, spec_params): + parsed_value, _ = _parse_build(self.__bytes__(), spec=spec, spec_params=spec_params) + self._parsed = (parsed_value, spec, spec_params) + return self._parsed[0] + + def __bytes__(self): + """ + :return: + A byte string + """ + + if self.contents is None: + return b'' + if self._bytes is None: + self._bytes = self._merge_chunks() + return self._bytes + + def _copy(self, other, copy_func): + """ + Copies the contents of another ParsableOctetString object to itself + + :param object: + Another instance of the same class + + :param copy_func: + An reference of copy.copy() or copy.deepcopy() to use when copying + lists, dicts and objects + """ + + super(ParsableOctetString, self)._copy(other, copy_func) + self._bytes = other._bytes + self._parsed = copy_func(other._parsed) + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + A byte string or None + """ + + if self.contents is None: + return None + + if self._parsed is not None: + return self._parsed[0].native + else: + return self.__bytes__() + + @property + def parsed(self): + """ + Returns the parsed object from .parse() + + :return: + The object returned by .parse() + """ + + if self._parsed is None: + self.parse() + + return self._parsed[0] + + def dump(self, force=False): + """ + Encodes the value using DER + + :param force: + If the encoded contents already exist, clear them and regenerate + to ensure they are in DER format instead of BER format + + :return: + A byte string of the DER-encoded value + """ + + if force: + if self._parsed is not None: + native = self.parsed.dump(force=force) + else: + native = self.native + self.contents = None + self.set(native) + + return Asn1Value.dump(self) + + +class ParsableOctetBitString(ParsableOctetString): + + tag = 3 + + # Whenever dealing with octet-based bit strings, we really want the + # bytes, so we just ignore the unused bits portion since it isn't + # applicable to the current use case + # unused_bits = struct.unpack('>B', self.contents[0:1])[0] + _chunks_offset = 1 + + def set(self, value): + """ + Sets the value of the object + + :param value: + A byte string + + :raises: + ValueError - when an invalid value is passed + """ + + if not isinstance(value, byte_cls): + raise TypeError(unwrap( + ''' + %s value must be a byte string, not %s + ''', + type_name(self), + type_name(value) + )) + + self._bytes = value + # Set the unused bits to 0 + self.contents = b'\x00' + value + self._header = None + if self._indefinite: + self._indefinite = False + self.method = 0 + if self._trailer != b'': + self._trailer = b'' + + +class Null(Primitive): + """ + Represents a null value in ASN.1 as None in Python + """ + + tag = 5 + + contents = b'' + + def set(self, value): + """ + Sets the value of the object + + :param value: + None + """ + + self.contents = b'' + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + None + """ + + return None + + +class ObjectIdentifier(Primitive, ValueMap): + """ + Represents an object identifier in ASN.1 as a Python unicode dotted + integer string + """ + + tag = 6 + + # A unicode string of the dotted form of the object identifier + _dotted = None + + @classmethod + def map(cls, value): + """ + Converts a dotted unicode 
string OID into a mapped unicode string + + :param value: + A dotted unicode string OID + + :raises: + ValueError - when no _map dict has been defined on the class + TypeError - when value is not a unicode string + + :return: + A mapped unicode string + """ + + if cls._map is None: + raise ValueError(unwrap( + ''' + %s._map has not been defined + ''', + type_name(cls) + )) + + if not isinstance(value, str_cls): + raise TypeError(unwrap( + ''' + value must be a unicode string, not %s + ''', + type_name(value) + )) + + return cls._map.get(value, value) + + @classmethod + def unmap(cls, value): + """ + Converts a mapped unicode string value into a dotted unicode string OID + + :param value: + A mapped unicode string OR dotted unicode string OID + + :raises: + ValueError - when no _map dict has been defined on the class or the value can't be unmapped + TypeError - when value is not a unicode string + + :return: + A dotted unicode string OID + """ + + if cls not in _SETUP_CLASSES: + cls()._setup() + _SETUP_CLASSES[cls] = True + + if cls._map is None: + raise ValueError(unwrap( + ''' + %s._map has not been defined + ''', + type_name(cls) + )) + + if not isinstance(value, str_cls): + raise TypeError(unwrap( + ''' + value must be a unicode string, not %s + ''', + type_name(value) + )) + + if value in cls._reverse_map: + return cls._reverse_map[value] + + if not _OID_RE.match(value): + raise ValueError(unwrap( + ''' + %s._map does not contain an entry for "%s" + ''', + type_name(cls), + value + )) + + return value + + def set(self, value): + """ + Sets the value of the object + + :param value: + A unicode string. May be a dotted integer string, or if _map is + provided, one of the mapped values. + + :raises: + ValueError - when an invalid value is passed + """ + + if not isinstance(value, str_cls): + raise TypeError(unwrap( + ''' + %s value must be a unicode string, not %s + ''', + type_name(self), + type_name(value) + )) + + self._native = value + + if self._map is not None: + if value in self._reverse_map: + value = self._reverse_map[value] + + self.contents = b'' + first = None + for index, part in enumerate(value.split('.')): + part = int(part) + + # The first two parts are merged into a single byte + if index == 0: + first = part + continue + elif index == 1: + part = (first * 40) + part + + encoded_part = chr_cls(0x7F & part) + part = part >> 7 + while part > 0: + encoded_part = chr_cls(0x80 | (0x7F & part)) + encoded_part + part = part >> 7 + self.contents += encoded_part + + self._header = None + if self._trailer != b'': + self._trailer = b'' + + def __unicode__(self): + """ + :return: + A unicode string + """ + + return self.dotted + + @property + def dotted(self): + """ + :return: + A unicode string of the object identifier in dotted notation, thus + ignoring any mapped value + """ + + if self._dotted is None: + output = [] + + part = 0 + for byte in self.contents: + if _PY2: + byte = ord(byte) + part = part * 128 + part += byte & 127 + # Last byte in subidentifier has the eighth bit set to 0 + if byte & 0x80 == 0: + if len(output) == 0: + output.append(str_cls(part // 40)) + output.append(str_cls(part % 40)) + else: + output.append(str_cls(part)) + part = 0 + + self._dotted = '.'.join(output) + return self._dotted + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + A unicode string or None. If _map is not defined, the unicode string + is a string of dotted integers. 
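The arc-merging and base-128 scheme implemented by set() and dotted above can be checked by hand; a sketch using the RSA OID prefix as an arbitrary example (u'' literals because this venv is Python 2):

    import binascii
    from asn1crypto.core import ObjectIdentifier

    oid = ObjectIdentifier(u'1.2.840.113549')
    # The first two arcs merge into one byte (1*40 + 2 = 0x2a); 840 and 113549
    # are base-128 encoded, with the high bit set on every byte except the last
    print(binascii.hexlify(oid.contents))   # 2a864886f70d
    print(oid.dotted)                       # 1.2.840.113549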
If _map is defined and the dotted + string is present in the _map, the mapped value is returned. + """ + + if self.contents is None: + return None + + if self._native is None: + self._native = self.dotted + if self._map is not None and self._native in self._map: + self._native = self._map[self._native] + return self._native + + +class ObjectDescriptor(Primitive): + """ + Represents an object descriptor from ASN.1 - no Python implementation + """ + + tag = 7 + + +class InstanceOf(Primitive): + """ + Represents an instance from ASN.1 - no Python implementation + """ + + tag = 8 + + +class Real(Primitive): + """ + Represents a real number from ASN.1 - no Python implementation + """ + + tag = 9 + + +class Enumerated(Integer): + """ + Represents a enumerated list of integers from ASN.1 as a Python + unicode string + """ + + tag = 10 + + def set(self, value): + """ + Sets the value of the object + + :param value: + An integer or a unicode string from _map + + :raises: + ValueError - when an invalid value is passed + """ + + if not isinstance(value, int_types) and not isinstance(value, str_cls): + raise TypeError(unwrap( + ''' + %s value must be an integer or a unicode string, not %s + ''', + type_name(self), + type_name(value) + )) + + if isinstance(value, str_cls): + if value not in self._reverse_map: + raise ValueError(unwrap( + ''' + %s value "%s" is not a valid value + ''', + type_name(self), + value + )) + + value = self._reverse_map[value] + + elif value not in self._map: + raise ValueError(unwrap( + ''' + %s value %s is not a valid value + ''', + type_name(self), + value + )) + + Integer.set(self, value) + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + A unicode string or None + """ + + if self.contents is None: + return None + + if self._native is None: + self._native = self._map[self.__int__()] + return self._native + + +class UTF8String(AbstractString): + """ + Represents a UTF-8 string from ASN.1 as a Python unicode string + """ + + tag = 12 + _encoding = 'utf-8' + + +class RelativeOid(ObjectIdentifier): + """ + Represents an object identifier in ASN.1 as a Python unicode dotted + integer string + """ + + tag = 13 + + +class Sequence(Asn1Value): + """ + Represents a sequence of fields from ASN.1 as a Python object with a + dict-like interface + """ + + tag = 16 + + class_ = 0 + method = 1 + + # A list of child objects, in order of _fields + children = None + + # Sequence overrides .contents to be a property so that the mutated state + # of child objects can be checked to ensure everything is up-to-date + _contents = None + + # Variable to track if the object has been mutated + _mutated = False + + # A list of tuples in one of the following forms. + # + # Option 1, a unicode string field name and a value class + # + # ("name", Asn1ValueClass) + # + # Option 2, same as Option 1, but with a dict of class params + # + # ("name", Asn1ValueClass, {'explicit': 5}) + _fields = [] + + # A dict with keys being the name of a field and the value being a unicode + # string of the method name on self to call to get the spec for that field + _spec_callbacks = None + + # A dict that maps unicode string field names to an index in _fields + _field_map = None + + # A list in the same order as _fields that has tuples in the form (class_, tag) + _field_ids = None + + # An optional 2-element tuple that defines the field names of an OID field + # and the field that the OID should be used to help decode. 
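A sketch of the Enumerated validation above, using a hypothetical Color subclass; unicode values must exist in _map and integers are checked against it:

    from asn1crypto.core import Enumerated

    class Color(Enumerated):   # hypothetical subclass for illustration
        _map = {0: u'red', 1: u'green', 2: u'blue'}

    print(Color(u'green').native)   # green
    print(Color(2).native)          # blue
    Color(7)                        # raises ValueError - 7 is not in _map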
Works with the + # _oid_specs attribute. + _oid_pair = None + + # A dict with keys that are unicode string OID values and values that are + # Asn1Value classes to use for decoding a variable-type field. + _oid_specs = None + + # A 2-element tuple of the indexes in _fields of the OID and value fields + _oid_nums = None + + # Predetermined field specs to optimize away calls to _determine_spec() + _precomputed_specs = None + + def __init__(self, value=None, default=None, **kwargs): + """ + Allows setting field values before passing everything else along to + Asn1Value.__init__() + + :param value: + A native Python datatype to initialize the object value with + + :param default: + The default value if no value is specified + """ + + Asn1Value.__init__(self, **kwargs) + + check_existing = False + if value is None and default is not None: + check_existing = True + if self.children is None: + if self.contents is None: + check_existing = False + else: + self._parse_children() + value = default + + if value is not None: + try: + # Fields are iterated in definition order to allow things like + # OID-based specs. Otherwise sometimes the value would be processed + # before the OID field, resulting in invalid value object creation. + if self._fields: + keys = [info[0] for info in self._fields] + unused_keys = set(value.keys()) + else: + keys = value.keys() + unused_keys = set(keys) + + for key in keys: + # If we are setting defaults, but a real value has already + # been set for the field, then skip it + if check_existing: + index = self._field_map[key] + if index < len(self.children) and self.children[index] is not VOID: + if key in unused_keys: + unused_keys.remove(key) + continue + + if key in value: + self.__setitem__(key, value[key]) + unused_keys.remove(key) + + if len(unused_keys): + raise ValueError(unwrap( + ''' + One or more unknown fields was passed to the constructor + of %s: %s + ''', + type_name(self), + ', '.join(sorted(list(unused_keys))) + )) + + except (ValueError, TypeError) as e: + args = e.args[1:] + e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args + raise e + + @property + def contents(self): + """ + :return: + A byte string of the DER-encoded contents of the sequence + """ + + if self.children is None: + return self._contents + + if self._is_mutated(): + self._set_contents() + + return self._contents + + @contents.setter + def contents(self, value): + """ + :param value: + A byte string of the DER-encoded contents of the sequence + """ + + self._contents = value + + def _is_mutated(self): + """ + :return: + A boolean - if the sequence or any children (recursively) have been + mutated + """ + + mutated = self._mutated + if self.children is not None: + for child in self.children: + if isinstance(child, Sequence) or isinstance(child, SequenceOf): + mutated = mutated or child._is_mutated() + + return mutated + + def _lazy_child(self, index): + """ + Builds a child object if the child has only been parsed into a tuple so far + """ + + child = self.children[index] + if child.__class__ == tuple: + child = self.children[index] = _build(*child) + return child + + def __len__(self): + """ + :return: + Integer + """ + # We inline this check to prevent method invocation each time + if self.children is None: + self._parse_children() + + return len(self.children) + + def __getitem__(self, key): + """ + Allows accessing fields by name or index + + :param key: + A unicode string of the field name, or an integer of the field index + + :raises: + KeyError - when a field 
name or index is invalid + + :return: + The Asn1Value object of the field specified + """ + + # We inline this check to prevent method invocation each time + if self.children is None: + self._parse_children() + + if not isinstance(key, int_types): + if key not in self._field_map: + raise KeyError(unwrap( + ''' + No field named "%s" defined for %s + ''', + key, + type_name(self) + )) + key = self._field_map[key] + + if key >= len(self.children): + raise KeyError(unwrap( + ''' + No field numbered %s is present in this %s + ''', + key, + type_name(self) + )) + + try: + return self._lazy_child(key) + + except (ValueError, TypeError) as e: + args = e.args[1:] + e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args + raise e + + def __setitem__(self, key, value): + """ + Allows settings fields by name or index + + :param key: + A unicode string of the field name, or an integer of the field index + + :param value: + A native Python datatype to set the field value to. This method will + construct the appropriate Asn1Value object from _fields. + + :raises: + ValueError - when a field name or index is invalid + """ + + # We inline this check to prevent method invocation each time + if self.children is None: + self._parse_children() + + if not isinstance(key, int_types): + if key not in self._field_map: + raise KeyError(unwrap( + ''' + No field named "%s" defined for %s + ''', + key, + type_name(self) + )) + key = self._field_map[key] + + field_name, field_spec, value_spec, field_params, _ = self._determine_spec(key) + + new_value = self._make_value(field_name, field_spec, value_spec, field_params, value) + + invalid_value = False + if isinstance(new_value, Any): + invalid_value = new_value.parsed is None + elif isinstance(new_value, Choice): + invalid_value = new_value.chosen.contents is None + else: + invalid_value = new_value.contents is None + + if invalid_value: + raise ValueError(unwrap( + ''' + Value for field "%s" of %s is not set + ''', + field_name, + type_name(self) + )) + + self.children[key] = new_value + + if self._native is not None: + self._native[self._fields[key][0]] = self.children[key].native + self._mutated = True + + def __delitem__(self, key): + """ + Allows deleting optional or default fields by name or index + + :param key: + A unicode string of the field name, or an integer of the field index + + :raises: + ValueError - when a field name or index is invalid, or the field is not optional or defaulted + """ + + # We inline this check to prevent method invocation each time + if self.children is None: + self._parse_children() + + if not isinstance(key, int_types): + if key not in self._field_map: + raise KeyError(unwrap( + ''' + No field named "%s" defined for %s + ''', + key, + type_name(self) + )) + key = self._field_map[key] + + name, _, params = self._fields[key] + if not params or ('default' not in params and 'optional' not in params): + raise ValueError(unwrap( + ''' + Can not delete the value for the field "%s" of %s since it is + not optional or defaulted + ''', + name, + type_name(self) + )) + + if 'optional' in params: + self.children[key] = VOID + if self._native is not None: + self._native[name] = None + else: + self.__setitem__(key, None) + self._mutated = True + + def __iter__(self): + """ + :return: + An iterator of field key names + """ + + for info in self._fields: + yield info[0] + + def _set_contents(self, force=False): + """ + Updates the .contents attribute of the value with the encoded value of + all of the child objects + + :param force: + 
Ensure all contents are in DER format instead of possibly using + cached BER-encoded data + """ + + if self.children is None: + self._parse_children() + + contents = BytesIO() + for index, info in enumerate(self._fields): + child = self.children[index] + if child is None: + child_dump = b'' + elif child.__class__ == tuple: + if force: + child_dump = self._lazy_child(index).dump(force=force) + else: + child_dump = child[3] + child[4] + child[5] + else: + child_dump = child.dump(force=force) + # Skip values that are the same as the default + if info[2] and 'default' in info[2]: + default_value = info[1](**info[2]) + if default_value.dump() == child_dump: + continue + contents.write(child_dump) + self._contents = contents.getvalue() + + self._header = None + if self._trailer != b'': + self._trailer = b'' + + def _setup(self): + """ + Generates _field_map, _field_ids and _oid_nums for use in parsing + """ + + cls = self.__class__ + cls._field_map = {} + cls._field_ids = [] + cls._precomputed_specs = [] + for index, field in enumerate(cls._fields): + if len(field) < 3: + field = field + ({},) + cls._fields[index] = field + cls._field_map[field[0]] = index + cls._field_ids.append(_build_id_tuple(field[2], field[1])) + + if cls._oid_pair is not None: + cls._oid_nums = (cls._field_map[cls._oid_pair[0]], cls._field_map[cls._oid_pair[1]]) + + for index, field in enumerate(cls._fields): + has_callback = cls._spec_callbacks is not None and field[0] in cls._spec_callbacks + is_mapped_oid = cls._oid_nums is not None and cls._oid_nums[1] == index + if has_callback or is_mapped_oid: + cls._precomputed_specs.append(None) + else: + cls._precomputed_specs.append((field[0], field[1], field[1], field[2], None)) + + def _determine_spec(self, index): + """ + Determine how a value for a field should be constructed + + :param index: + The field number + + :return: + A tuple containing the following elements: + - unicode string of the field name + - Asn1Value class of the field spec + - Asn1Value class of the value spec + - None or dict of params to pass to the field spec + - None or Asn1Value class indicating the value spec was derived from an OID or a spec callback + """ + + name, field_spec, field_params = self._fields[index] + value_spec = field_spec + spec_override = None + + if self._spec_callbacks is not None and name in self._spec_callbacks: + callback = self._spec_callbacks[name] + spec_override = callback(self) + if spec_override: + # Allow a spec callback to specify both the base spec and + # the override, for situations such as OctetString and parse_as + if spec_override.__class__ == tuple and len(spec_override) == 2: + field_spec, value_spec = spec_override + if value_spec is None: + value_spec = field_spec + spec_override = None + # When no field spec is specified, use a single return value as that + elif field_spec is None: + field_spec = spec_override + value_spec = field_spec + spec_override = None + else: + value_spec = spec_override + + elif self._oid_nums is not None and self._oid_nums[1] == index: + oid = self._lazy_child(self._oid_nums[0]).native + if oid in self._oid_specs: + spec_override = self._oid_specs[oid] + value_spec = spec_override + + return (name, field_spec, value_spec, field_params, spec_override) + + def _make_value(self, field_name, field_spec, value_spec, field_params, value): + """ + Contructs an appropriate Asn1Value object for a field + + :param field_name: + A unicode string of the field name + + :param field_spec: + An Asn1Value class that is the field spec + + :param 
value_spec: + An Asn1Value class that is the vaue spec + + :param field_params: + None or a dict of params for the field spec + + :param value: + The value to construct an Asn1Value object from + + :return: + An instance of a child class of Asn1Value + """ + + if value is None and 'optional' in field_params: + return VOID + + specs_different = field_spec != value_spec + is_any = issubclass(field_spec, Any) + + if issubclass(value_spec, Choice): + if not isinstance(value, Asn1Value): + raise ValueError(unwrap( + ''' + Can not set a native python value to %s, which has the + choice type of %s - value must be an instance of Asn1Value + ''', + field_name, + type_name(value_spec) + )) + if not isinstance(value, value_spec): + wrapper = value_spec() + wrapper.validate(value.class_, value.tag, value.contents) + wrapper._parsed = value + new_value = wrapper + else: + new_value = value + + elif isinstance(value, field_spec): + new_value = value + if specs_different: + new_value.parse(value_spec) + + elif (not specs_different or is_any) and not isinstance(value, value_spec): + new_value = value_spec(value, **field_params) + + else: + if isinstance(value, value_spec): + new_value = value + else: + new_value = value_spec(value) + + # For when the field is OctetString or OctetBitString with embedded + # values we need to wrap the value in the field spec to get the + # appropriate encoded value. + if specs_different and not is_any: + wrapper = field_spec(value=new_value.dump(), **field_params) + wrapper._parsed = (new_value, new_value.__class__, None) + new_value = wrapper + + new_value = _fix_tagging(new_value, field_params) + + return new_value + + def _parse_children(self, recurse=False): + """ + Parses the contents and generates Asn1Value objects based on the + definitions from _fields. 
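A sketch of how the _fields definition, __setitem__() and _make_value() above fit together, with a hypothetical two-field spec:

    from asn1crypto.core import Integer, Sequence, UTF8String

    class Record(Sequence):   # hypothetical spec for illustration
        _fields = [
            ('id', Integer),
            ('label', UTF8String, {'optional': True}),
        ]

    rec = Record({'id': 7})
    rec['label'] = u'example'   # routed through _make_value()
    print(rec.native)           # OrderedDict([(u'id', 7), (u'label', u'example')])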
+ + :param recurse: + If child objects that are Sequence or SequenceOf objects should + be recursively parsed + + :raises: + ValueError - when an error occurs parsing child objects + """ + + cls = self.__class__ + if self._contents is None: + if self._fields: + self.children = [VOID] * len(self._fields) + for index, (_, _, params) in enumerate(self._fields): + if 'default' in params: + if cls._precomputed_specs[index]: + field_name, field_spec, value_spec, field_params, _ = cls._precomputed_specs[index] + else: + field_name, field_spec, value_spec, field_params, _ = self._determine_spec(index) + self.children[index] = self._make_value(field_name, field_spec, value_spec, field_params, None) + return + + try: + self.children = [] + contents_length = len(self._contents) + child_pointer = 0 + field = 0 + field_len = len(self._fields) + parts = None + again = child_pointer < contents_length + while again: + if parts is None: + parts, child_pointer = _parse(self._contents, contents_length, pointer=child_pointer) + again = child_pointer < contents_length + + if field < field_len: + _, field_spec, value_spec, field_params, spec_override = ( + cls._precomputed_specs[field] or self._determine_spec(field)) + + # If the next value is optional or default, allow it to be absent + if field_params and ('optional' in field_params or 'default' in field_params): + if self._field_ids[field] != (parts[0], parts[2]) and field_spec != Any: + + # See if the value is a valid choice before assuming + # that we have a missing optional or default value + choice_match = False + if issubclass(field_spec, Choice): + try: + tester = field_spec(**field_params) + tester.validate(parts[0], parts[2], parts[4]) + choice_match = True + except (ValueError): + pass + + if not choice_match: + if 'optional' in field_params: + self.children.append(VOID) + else: + self.children.append(field_spec(**field_params)) + field += 1 + again = True + continue + + if field_spec is None or (spec_override and issubclass(field_spec, Any)): + field_spec = value_spec + spec_override = None + + if spec_override: + child = parts + (field_spec, field_params, value_spec) + else: + child = parts + (field_spec, field_params) + + # Handle situations where an optional or defaulted field definition is incorrect + elif field_len > 0 and field + 1 <= field_len: + missed_fields = [] + prev_field = field - 1 + while prev_field >= 0: + prev_field_info = self._fields[prev_field] + if len(prev_field_info) < 3: + break + if 'optional' in prev_field_info[2] or 'default' in prev_field_info[2]: + missed_fields.append(prev_field_info[0]) + prev_field -= 1 + plural = 's' if len(missed_fields) > 1 else '' + missed_field_names = ', '.join(missed_fields) + raise ValueError(unwrap( + ''' + Data for field %s (%s class, %s method, tag %s) does + not match the field definition%s of %s + ''', + field + 1, + CLASS_NUM_TO_NAME_MAP.get(parts[0]), + METHOD_NUM_TO_NAME_MAP.get(parts[1]), + parts[2], + plural, + missed_field_names + )) + + else: + child = parts + + if recurse: + child = _build(*child) + if isinstance(child, (Sequence, SequenceOf)): + child._parse_children(recurse=True) + + self.children.append(child) + field += 1 + parts = None + + index = len(self.children) + while index < field_len: + name, field_spec, field_params = self._fields[index] + if 'default' in field_params: + self.children.append(field_spec(**field_params)) + elif 'optional' in field_params: + self.children.append(VOID) + else: + raise ValueError(unwrap( + ''' + Field "%s" is missing from structure + 
''', + name + )) + index += 1 + + except (ValueError, TypeError) as e: + args = e.args[1:] + e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args + raise e + + def spec(self, field_name): + """ + Determines the spec to use for the field specified. Depending on how + the spec is determined (_oid_pair or _spec_callbacks), it may be + necessary to set preceding field values before calling this. Usually + specs, if dynamic, are controlled by a preceding ObjectIdentifier + field. + + :param field_name: + A unicode string of the field name to get the spec for + + :return: + A child class of asn1crypto.core.Asn1Value that the field must be + encoded using + """ + + if not isinstance(field_name, str_cls): + raise TypeError(unwrap( + ''' + field_name must be a unicode string, not %s + ''', + type_name(field_name) + )) + + if self._fields is None: + raise ValueError(unwrap( + ''' + Unable to retrieve spec for field %s in the class %s because + _fields has not been set + ''', + repr(field_name), + type_name(self) + )) + + index = self._field_map[field_name] + info = self._determine_spec(index) + + return info[2] + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + An OrderedDict or None. If an OrderedDict, all child values are + recursively converted to native representation also. + """ + + if self.contents is None: + return None + + if self._native is None: + if self.children is None: + self._parse_children(recurse=True) + try: + self._native = OrderedDict() + for index, child in enumerate(self.children): + if child.__class__ == tuple: + child = _build(*child) + self.children[index] = child + try: + name = self._fields[index][0] + except (IndexError): + name = str_cls(index) + self._native[name] = child.native + except (ValueError, TypeError) as e: + args = e.args[1:] + e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args + raise e + return self._native + + def _copy(self, other, copy_func): + """ + Copies the contents of another Sequence object to itself + + :param object: + Another instance of the same class + + :param copy_func: + An reference of copy.copy() or copy.deepcopy() to use when copying + lists, dicts and objects + """ + + super(Sequence, self)._copy(other, copy_func) + if self.children is not None: + self.children = [] + for child in other.children: + if child.__class__ == tuple: + self.children.append(child) + else: + self.children.append(child.copy()) + + def debug(self, nest_level=1): + """ + Show the binary data and parsed data in a tree structure + """ + + if self.children is None: + self._parse_children() + + prefix = ' ' * nest_level + _basic_debug(prefix, self) + for field_name in self: + child = self._lazy_child(self._field_map[field_name]) + if child is not VOID: + print('%s Field "%s"' % (prefix, field_name)) + child.debug(nest_level + 3) + + def dump(self, force=False): + """ + Encodes the value using DER + + :param force: + If the encoded contents already exist, clear them and regenerate + to ensure they are in DER format instead of BER format + + :return: + A byte string of the DER-encoded value + """ + + if force: + self._set_contents(force=force) + + if self._fields and self.children is not None: + for index, (field_name, _, params) in enumerate(self._fields): + if self.children[index] is not VOID: + continue + if 'default' in params or 'optional' in params: + continue + raise ValueError(unwrap( + ''' + Field "%s" is missing from structure + ''', + field_name + )) + + 
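Parsing reverses dump(); a sketch that assumes the Asn1Value.load() classmethod from the earlier, unshown part of this file:

    from asn1crypto.core import Integer, Sequence

    class Pair(Sequence):   # hypothetical spec for illustration
        _fields = [('a', Integer), ('b', Integer)]

    pair = Pair.load(b'\x30\x06\x02\x01\x01\x02\x01\x02')   # SEQUENCE of INTEGERs 1 and 2
    print(pair['a'].native)   # 1 - children are built lazily via _lazy_child()
    print(pair.native)        # OrderedDict([(u'a', 1), (u'b', 2)])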
return Asn1Value.dump(self) + + +class SequenceOf(Asn1Value): + """ + Represents a sequence (ordered) of a single type of values from ASN.1 as a + Python object with a list-like interface + """ + + tag = 16 + + class_ = 0 + method = 1 + + # A list of child objects + children = None + + # SequenceOf overrides .contents to be a property so that the mutated state + # of child objects can be checked to ensure everything is up-to-date + _contents = None + + # Variable to track if the object has been mutated + _mutated = False + + # An Asn1Value class to use when parsing children + _child_spec = None + + def __init__(self, value=None, default=None, contents=None, spec=None, **kwargs): + """ + Allows setting child objects and the _child_spec via the spec parameter + before passing everything else along to Asn1Value.__init__() + + :param value: + A native Python datatype to initialize the object value with + + :param default: + The default value if no value is specified + + :param contents: + A byte string of the encoded contents of the value + + :param spec: + A class derived from Asn1Value to use to parse children + """ + + if spec: + self._child_spec = spec + + Asn1Value.__init__(self, **kwargs) + + try: + if contents is not None: + self.contents = contents + else: + if value is None and default is not None: + value = default + + if value is not None: + for index, child in enumerate(value): + self.__setitem__(index, child) + + # Make sure a blank list is serialized + if self.contents is None: + self._set_contents() + + except (ValueError, TypeError) as e: + args = e.args[1:] + e.args = (e.args[0] + '\n while constructing %s' % type_name(self),) + args + raise e + + @property + def contents(self): + """ + :return: + A byte string of the DER-encoded contents of the sequence + """ + + if self.children is None: + return self._contents + + if self._is_mutated(): + self._set_contents() + + return self._contents + + @contents.setter + def contents(self, value): + """ + :param value: + A byte string of the DER-encoded contents of the sequence + """ + + self._contents = value + + def _is_mutated(self): + """ + :return: + A boolean - if the sequence or any children (recursively) have been + mutated + """ + + mutated = self._mutated + if self.children is not None: + for child in self.children: + if isinstance(child, Sequence) or isinstance(child, SequenceOf): + mutated = mutated or child._is_mutated() + + return mutated + + def _lazy_child(self, index): + """ + Builds a child object if the child has only been parsed into a tuple so far + """ + + child = self.children[index] + if child.__class__ == tuple: + child = _build(*child) + self.children[index] = child + return child + + def _make_value(self, value): + """ + Constructs a _child_spec value from a native Python data type, or + an appropriate Asn1Value object + + :param value: + A native Python value, or some child of Asn1Value + + :return: + An object of type _child_spec + """ + + if isinstance(value, self._child_spec): + new_value = value + + elif issubclass(self._child_spec, Any): + if isinstance(value, Asn1Value): + new_value = value + else: + raise ValueError(unwrap( + ''' + Can not set a native python value to %s where the + _child_spec is Any - value must be an instance of Asn1Value + ''', + type_name(self) + )) + + elif issubclass(self._child_spec, Choice): + if not isinstance(value, Asn1Value): + raise ValueError(unwrap( + ''' + Can not set a native python value to %s where the + _child_spec is the choice type %s - value must be an + instance 
of Asn1Value
+                    ''',
+                    type_name(self),
+                    self._child_spec.__name__
+                ))
+            if not isinstance(value, self._child_spec):
+                wrapper = self._child_spec()
+                wrapper.validate(value.class_, value.tag, value.contents)
+                wrapper._parsed = value
+                value = wrapper
+            new_value = value
+
+        else:
+            return self._child_spec(value=value)
+
+        params = {}
+        if self._child_spec.explicit:
+            params['explicit'] = self._child_spec.explicit
+        if self._child_spec.implicit:
+            params['implicit'] = (self._child_spec.class_, self._child_spec.tag)
+        return _fix_tagging(new_value, params)
+
+    def __len__(self):
+        """
+        :return:
+            An integer
+        """
+        # We inline this check to prevent method invocation each time
+        if self.children is None:
+            self._parse_children()
+
+        return len(self.children)
+
+    def __getitem__(self, key):
+        """
+        Allows accessing children via index
+
+        :param key:
+            Integer index of child
+        """
+
+        # We inline this check to prevent method invocation each time
+        if self.children is None:
+            self._parse_children()
+
+        return self._lazy_child(key)
+
+    def __setitem__(self, key, value):
+        """
+        Allows overriding a child via index
+
+        :param key:
+            Integer index of child
+
+        :param value:
+            Native python datatype that will be passed to _child_spec to create
+            new child object
+        """
+
+        # We inline this check to prevent method invocation each time
+        if self.children is None:
+            self._parse_children()
+
+        new_value = self._make_value(value)
+
+        # If adding at the end, create a space for the new value
+        if key == len(self.children):
+            self.children.append(None)
+            if self._native is not None:
+                self._native.append(None)
+
+        self.children[key] = new_value
+
+        if self._native is not None:
+            self._native[key] = self.children[key].native
+
+        self._mutated = True
+
+    def __delitem__(self, key):
+        """
+        Allows removing a child via index
+
+        :param key:
+            Integer index of child
+        """
+
+        # We inline this check to prevent method invocation each time
+        if self.children is None:
+            self._parse_children()
+
+        self.children.pop(key)
+        if self._native is not None:
+            self._native.pop(key)
+
+        self._mutated = True
+
+    def __iter__(self):
+        """
+        :return:
+            An iter() of child objects
+        """
+
+        # We inline this check to prevent method invocation each time
+        if self.children is None:
+            self._parse_children()
+
+        for index in range(0, len(self.children)):
+            yield self._lazy_child(index)
+
+    def __contains__(self, item):
+        """
+        :param item:
+            An object of the type cls._child_spec
+
+        :return:
+            A boolean if the item is contained in this SequenceOf
+        """
+
+        if item is None or item is VOID:
+            return False
+
+        if not isinstance(item, self._child_spec):
+            raise TypeError(unwrap(
+                '''
+                Checking membership in %s is only available for instances of
+                %s, not %s
+                ''',
+                type_name(self),
+                type_name(self._child_spec),
+                type_name(item)
+            ))
+
+        for child in self:
+            if child == item:
+                return True
+
+        return False
+
+    def append(self, value):
+        """
+        Allows adding a child to the end of the sequence
+
+        :param value:
+            Native python datatype that will be passed to _child_spec to create
+            new child object
+        """
+
+        # We inline this check to prevent method invocation each time
+        if self.children is None:
+            self._parse_children()
+
+        self.children.append(self._make_value(value))
+
+        if self._native is not None:
+            self._native.append(self.children[-1].native)
+
+        self._mutated = True
+
+    def _set_contents(self, force=False):
+        """
+        Encodes all child objects into the contents for this object
+
+        :param force:
+            Ensure all contents are in DER
format instead of possibly using + cached BER-encoded data + """ + + if self.children is None: + self._parse_children() + + contents = BytesIO() + for child in self: + contents.write(child.dump(force=force)) + self._contents = contents.getvalue() + self._header = None + if self._trailer != b'': + self._trailer = b'' + + def _parse_children(self, recurse=False): + """ + Parses the contents and generates Asn1Value objects based on the + definitions from _child_spec. + + :param recurse: + If child objects that are Sequence or SequenceOf objects should + be recursively parsed + + :raises: + ValueError - when an error occurs parsing child objects + """ + + try: + self.children = [] + if self._contents is None: + return + contents_length = len(self._contents) + child_pointer = 0 + while child_pointer < contents_length: + parts, child_pointer = _parse(self._contents, contents_length, pointer=child_pointer) + if self._child_spec: + child = parts + (self._child_spec,) + else: + child = parts + if recurse: + child = _build(*child) + if isinstance(child, (Sequence, SequenceOf)): + child._parse_children(recurse=True) + self.children.append(child) + except (ValueError, TypeError) as e: + args = e.args[1:] + e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args + raise e + + def spec(self): + """ + Determines the spec to use for child values. + + :return: + A child class of asn1crypto.core.Asn1Value that child values must be + encoded using + """ + + return self._child_spec + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + A list or None. If a list, all child values are recursively + converted to native representation also. + """ + + if self.contents is None: + return None + + if self._native is None: + if self.children is None: + self._parse_children(recurse=True) + try: + self._native = [child.native for child in self] + except (ValueError, TypeError) as e: + args = e.args[1:] + e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args + raise e + return self._native + + def _copy(self, other, copy_func): + """ + Copies the contents of another SequenceOf object to itself + + :param object: + Another instance of the same class + + :param copy_func: + An reference of copy.copy() or copy.deepcopy() to use when copying + lists, dicts and objects + """ + + super(SequenceOf, self)._copy(other, copy_func) + if self.children is not None: + self.children = [] + for child in other.children: + if child.__class__ == tuple: + self.children.append(child) + else: + self.children.append(child.copy()) + + def debug(self, nest_level=1): + """ + Show the binary data and parsed data in a tree structure + """ + + if self.children is None: + self._parse_children() + + prefix = ' ' * nest_level + _basic_debug(prefix, self) + for child in self: + child.debug(nest_level + 1) + + def dump(self, force=False): + """ + Encodes the value using DER + + :param force: + If the encoded contents already exist, clear them and regenerate + to ensure they are in DER format instead of BER format + + :return: + A byte string of the DER-encoded value + """ + + if force: + self._set_contents(force=force) + + return Asn1Value.dump(self) + + +class Set(Sequence): + """ + Represents a set of fields (unordered) from ASN.1 as a Python object with a + dict-like interface + """ + + method = 1 + class_ = 0 + tag = 17 + + # A dict of 2-element tuples in the form (class_, tag) as keys and integers + # as values that are the index of the field in _fields + 
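A sketch of the list-like SequenceOf interface defined above, with a hypothetical child spec:

    import binascii
    from asn1crypto.core import Integer, SequenceOf

    class Ints(SequenceOf):   # hypothetical spec for illustration
        _child_spec = Integer

    vals = Ints([1, 2])
    vals.append(3)                         # children pass through _make_value()
    print(vals.native)                     # [1, 2, 3]
    print(binascii.hexlify(vals.dump()))   # 3009020101020102020103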
_field_ids = None + + def _setup(self): + """ + Generates _field_map, _field_ids and _oid_nums for use in parsing + """ + + cls = self.__class__ + cls._field_map = {} + cls._field_ids = {} + cls._precomputed_specs = [] + for index, field in enumerate(cls._fields): + if len(field) < 3: + field = field + ({},) + cls._fields[index] = field + cls._field_map[field[0]] = index + cls._field_ids[_build_id_tuple(field[2], field[1])] = index + + if cls._oid_pair is not None: + cls._oid_nums = (cls._field_map[cls._oid_pair[0]], cls._field_map[cls._oid_pair[1]]) + + for index, field in enumerate(cls._fields): + has_callback = cls._spec_callbacks is not None and field[0] in cls._spec_callbacks + is_mapped_oid = cls._oid_nums is not None and cls._oid_nums[1] == index + if has_callback or is_mapped_oid: + cls._precomputed_specs.append(None) + else: + cls._precomputed_specs.append((field[0], field[1], field[1], field[2], None)) + + def _parse_children(self, recurse=False): + """ + Parses the contents and generates Asn1Value objects based on the + definitions from _fields. + + :param recurse: + If child objects that are Sequence or SequenceOf objects should + be recursively parsed + + :raises: + ValueError - when an error occurs parsing child objects + """ + + cls = self.__class__ + if self._contents is None: + if self._fields: + self.children = [VOID] * len(self._fields) + for index, (_, _, params) in enumerate(self._fields): + if 'default' in params: + if cls._precomputed_specs[index]: + field_name, field_spec, value_spec, field_params, _ = cls._precomputed_specs[index] + else: + field_name, field_spec, value_spec, field_params, _ = self._determine_spec(index) + self.children[index] = self._make_value(field_name, field_spec, value_spec, field_params, None) + return + + try: + child_map = {} + contents_length = len(self.contents) + child_pointer = 0 + seen_field = 0 + while child_pointer < contents_length: + parts, child_pointer = _parse(self.contents, contents_length, pointer=child_pointer) + + id_ = (parts[0], parts[2]) + + field = self._field_ids.get(id_) + if field is None: + raise ValueError(unwrap( + ''' + Data for field %s (%s class, %s method, tag %s) does + not match any of the field definitions + ''', + seen_field, + CLASS_NUM_TO_NAME_MAP.get(parts[0]), + METHOD_NUM_TO_NAME_MAP.get(parts[1]), + parts[2], + )) + + _, field_spec, value_spec, field_params, spec_override = ( + cls._precomputed_specs[field] or self._determine_spec(field)) + + if field_spec is None or (spec_override and issubclass(field_spec, Any)): + field_spec = value_spec + spec_override = None + + if spec_override: + child = parts + (field_spec, field_params, value_spec) + else: + child = parts + (field_spec, field_params) + + if recurse: + child = _build(*child) + if isinstance(child, (Sequence, SequenceOf)): + child._parse_children(recurse=True) + + child_map[field] = child + seen_field += 1 + + total_fields = len(self._fields) + + for index in range(0, total_fields): + if index in child_map: + continue + + name, field_spec, value_spec, field_params, spec_override = ( + cls._precomputed_specs[index] or self._determine_spec(index)) + + if field_spec is None or (spec_override and issubclass(field_spec, Any)): + field_spec = value_spec + spec_override = None + + missing = False + + if not field_params: + missing = True + elif 'optional' not in field_params and 'default' not in field_params: + missing = True + elif 'optional' in field_params: + child_map[index] = VOID + elif 'default' in field_params: + child_map[index] = 
field_spec(**field_params) + + if missing: + raise ValueError(unwrap( + ''' + Missing required field "%s" from %s + ''', + name, + type_name(self) + )) + + self.children = [] + for index in range(0, total_fields): + self.children.append(child_map[index]) + + except (ValueError, TypeError) as e: + args = e.args[1:] + e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args + raise e + + def _set_contents(self, force=False): + """ + Encodes all child objects into the contents for this object. + + This method is overridden because a Set needs to be encoded by + removing defaulted fields and then sorting the fields by tag. + + :param force: + Ensure all contents are in DER format instead of possibly using + cached BER-encoded data + """ + + if self.children is None: + self._parse_children() + + child_tag_encodings = [] + for index, child in enumerate(self.children): + child_encoding = child.dump(force=force) + + # Skip encoding defaulted children + name, spec, field_params = self._fields[index] + if 'default' in field_params: + if spec(**field_params).dump() == child_encoding: + continue + + child_tag_encodings.append((child.tag, child_encoding)) + child_tag_encodings.sort(key=lambda ct: ct[0]) + + self._contents = b''.join([ct[1] for ct in child_tag_encodings]) + self._header = None + if self._trailer != b'': + self._trailer = b'' + + +class SetOf(SequenceOf): + """ + Represents a set (unordered) of a single type of values from ASN.1 as a + Python object with a list-like interface + """ + + tag = 17 + + def _set_contents(self, force=False): + """ + Encodes all child objects into the contents for this object. + + This method is overridden because a SetOf needs to be encoded by + sorting the child encodings. + + :param force: + Ensure all contents are in DER format instead of possibly using + cached BER-encoded data + """ + + if self.children is None: + self._parse_children() + + child_encodings = [] + for child in self: + child_encodings.append(child.dump(force=force)) + + self._contents = b''.join(sorted(child_encodings)) + self._header = None + if self._trailer != b'': + self._trailer = b'' + + +class EmbeddedPdv(Sequence): + """ + A sequence structure + """ + + tag = 11 + + +class NumericString(AbstractString): + """ + Represents a numeric string from ASN.1 as a Python unicode string + """ + + tag = 18 + _encoding = 'latin1' + + +class PrintableString(AbstractString): + """ + Represents a printable string from ASN.1 as a Python unicode string + """ + + tag = 19 + _encoding = 'latin1' + + +class TeletexString(AbstractString): + """ + Represents a teletex string from ASN.1 as a Python unicode string + """ + + tag = 20 + _encoding = 'teletex' + + +class VideotexString(OctetString): + """ + Represents a videotex string from ASN.1 as a Python byte string + """ + + tag = 21 + + +class IA5String(AbstractString): + """ + Represents an IA5 string from ASN.1 as a Python unicode string + """ + + tag = 22 + _encoding = 'ascii' + + +class AbstractTime(AbstractString): + """ + Represents a time from ASN.1 as a Python datetime.datetime object + """ + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + A datetime.datetime object in the UTC timezone or None + """ + + if self.contents is None: + return None + + if self._native is None: + string = str_cls(self) + has_timezone = re.search('[-\\+]', string) + + # We don't know what timezone it is in, or it is UTC because of a Z + # suffix, so we just assume UTC + if not has_timezone: + 
string = string.rstrip('Z') + date = self._date_by_len(string) + self._native = date.replace(tzinfo=timezone.utc) + + else: + # Python 2 doesn't support the %z format code, so we have to manually + # process the timezone offset. + date = self._date_by_len(string[0:-5]) + + hours = int(string[-4:-2]) + minutes = int(string[-2:]) + delta = timedelta(hours=abs(hours), minutes=minutes) + if hours < 0: + date -= delta + else: + date += delta + + self._native = date.replace(tzinfo=timezone.utc) + + return self._native + + +class UTCTime(AbstractTime): + """ + Represents a UTC time from ASN.1 as a Python datetime.datetime object in UTC + """ + + tag = 23 + + def set(self, value): + """ + Sets the value of the object + + :param value: + A unicode string or a datetime.datetime object + + :raises: + ValueError - when an invalid value is passed + """ + + if isinstance(value, datetime): + value = value.strftime('%y%m%d%H%M%SZ') + if _PY2: + value = value.decode('ascii') + + AbstractString.set(self, value) + # Set it to None and let the class take care of converting the next + # time that .native is called + self._native = None + + def _date_by_len(self, string): + """ + Parses a date from a string based on its length + + :param string: + A unicode string to parse + + :return: + A datetime.datetime object or a unicode string + """ + + strlen = len(string) + + year_num = int(string[0:2]) + if year_num < 50: + prefix = '20' + else: + prefix = '19' + + if strlen == 10: + return datetime.strptime(prefix + string, '%Y%m%d%H%M') + + if strlen == 12: + return datetime.strptime(prefix + string, '%Y%m%d%H%M%S') + + return string + + +class GeneralizedTime(AbstractTime): + """ + Represents a generalized time from ASN.1 as a Python datetime.datetime + object or asn1crypto.util.extended_datetime object in UTC + """ + + tag = 24 + + def set(self, value): + """ + Sets the value of the object + + :param value: + A unicode string, a datetime.datetime object or an + asn1crypto.util.extended_datetime object + + :raises: + ValueError - when an invalid value is passed + """ + + if isinstance(value, (datetime, extended_datetime)): + value = value.strftime('%Y%m%d%H%M%SZ') + if _PY2: + value = value.decode('ascii') + + AbstractString.set(self, value) + # Set it to None and let the class take care of converting the next + # time that .native is called + self._native = None + + def _date_by_len(self, string): + """ + Parses a date from a string based on its length + + :param string: + A unicode string to parse + + :return: + A datetime.datetime object, asn1crypto.util.extended_datetime object or + a unicode string + """ + + strlen = len(string) + + date_format = None + if strlen == 10: + date_format = '%Y%m%d%H' + elif strlen == 12: + date_format = '%Y%m%d%H%M' + elif strlen == 14: + date_format = '%Y%m%d%H%M%S' + elif strlen == 18: + date_format = '%Y%m%d%H%M%S.%f' + + if date_format: + if len(string) >= 4 and string[0:4] == '0000': + # Year 2000 shares a calendar with year 0, and is supported natively + t = datetime.strptime('2000' + string[4:], date_format) + return extended_datetime( + 0, + t.month, + t.day, + t.hour, + t.minute, + t.second, + t.microsecond, + t.tzinfo + ) + return datetime.strptime(string, date_format) + + return string + + +class GraphicString(AbstractString): + """ + Represents a graphic string from ASN.1 as a Python unicode string + """ + + tag = 25 + # This is technically not correct since this type can contain any charset + _encoding = 'latin1' + + +class VisibleString(AbstractString): + """ + 
Represents a visible string from ASN.1 as a Python unicode string + """ + + tag = 26 + _encoding = 'latin1' + + +class GeneralString(AbstractString): + """ + Represents a general string from ASN.1 as a Python unicode string + """ + + tag = 27 + # This is technically not correct since this type can contain any charset + _encoding = 'latin1' + + +class UniversalString(AbstractString): + """ + Represents a universal string from ASN.1 as a Python unicode string + """ + + tag = 28 + _encoding = 'utf-32-be' + + +class CharacterString(AbstractString): + """ + Represents a character string from ASN.1 as a Python unicode string + """ + + tag = 29 + # This is technically not correct since this type can contain any charset + _encoding = 'latin1' + + +class BMPString(AbstractString): + """ + Represents a BMP string from ASN.1 as a Python unicode string + """ + + tag = 30 + _encoding = 'utf-16-be' + + +def _basic_debug(prefix, self): + """ + Prints out basic information about an Asn1Value object. Extracted for reuse + among different classes that customize the debug information. + + :param prefix: + A unicode string of spaces to prefix output line with + + :param self: + The object to print the debugging information about + """ + + print('%s%s Object #%s' % (prefix, type_name(self), id(self))) + if self._header: + print('%s Header: 0x%s' % (prefix, binascii.hexlify(self._header or b'').decode('utf-8'))) + + has_header = self.method is not None and self.class_ is not None and self.tag is not None + if has_header: + method_name = METHOD_NUM_TO_NAME_MAP.get(self.method) + class_name = CLASS_NUM_TO_NAME_MAP.get(self.class_) + + if self.explicit is not None: + for class_, tag in self.explicit: + print( + '%s %s tag %s (explicitly tagged)' % + ( + prefix, + CLASS_NUM_TO_NAME_MAP.get(class_), + tag + ) + ) + if has_header: + print('%s %s %s %s' % (prefix, method_name, class_name, self.tag)) + + elif self.implicit: + if has_header: + print('%s %s %s tag %s (implicitly tagged)' % (prefix, method_name, class_name, self.tag)) + + elif has_header: + print('%s %s %s tag %s' % (prefix, method_name, class_name, self.tag)) + + print('%s Data: 0x%s' % (prefix, binascii.hexlify(self.contents or b'').decode('utf-8'))) + + +def _tag_type_to_explicit_implicit(params): + """ + Converts old-style "tag_type" and "tag" params to "explicit" and "implicit" + + :param params: + A dict of parameters to convert from tag_type/tag to explicit/implicit + """ + + if 'tag_type' in params: + if params['tag_type'] == 'explicit': + params['explicit'] = (params.get('class', 2), params['tag']) + elif params['tag_type'] == 'implicit': + params['implicit'] = (params.get('class', 2), params['tag']) + del params['tag_type'] + del params['tag'] + if 'class' in params: + del params['class'] + + +def _fix_tagging(value, params): + """ + Checks if a value is properly tagged based on the spec, and re/untags as + necessary + + :param value: + An Asn1Value object + + :param params: + A dict of spec params + + :return: + An Asn1Value that is properly tagged + """ + + _tag_type_to_explicit_implicit(params) + + retag = False + if 'implicit' not in params: + if value.implicit is not False: + retag = True + else: + if isinstance(params['implicit'], tuple): + class_, tag = params['implicit'] + else: + tag = params['implicit'] + class_ = 'context' + if value.implicit is False: + retag = True + elif value.class_ != CLASS_NAME_TO_NUM_MAP[class_] or value.tag != tag: + retag = True + + if params.get('explicit') != value.explicit: + retag = True + + if retag: + 
return value.retag(params) + return value + + +def _build_id_tuple(params, spec): + """ + Builds a 2-element tuple used to identify fields by grabbing the class_ + and tag from an Asn1Value class and the params dict being passed to it + + :param params: + A dict of params to pass to spec + + :param spec: + An Asn1Value class + + :return: + A 2-element integer tuple in the form (class_, tag) + """ + + # Handle situations where the the spec is not known at setup time + if spec is None: + return (None, None) + + required_class = spec.class_ + required_tag = spec.tag + + _tag_type_to_explicit_implicit(params) + + if 'explicit' in params: + if isinstance(params['explicit'], tuple): + required_class, required_tag = params['explicit'] + else: + required_class = 2 + required_tag = params['explicit'] + elif 'implicit' in params: + if isinstance(params['implicit'], tuple): + required_class, required_tag = params['implicit'] + else: + required_class = 2 + required_tag = params['implicit'] + if required_class is not None and not isinstance(required_class, int_types): + required_class = CLASS_NAME_TO_NUM_MAP[required_class] + + required_class = params.get('class_', required_class) + required_tag = params.get('tag', required_tag) + + return (required_class, required_tag) + + +_UNIVERSAL_SPECS = { + 1: Boolean, + 2: Integer, + 3: BitString, + 4: OctetString, + 5: Null, + 6: ObjectIdentifier, + 7: ObjectDescriptor, + 8: InstanceOf, + 9: Real, + 10: Enumerated, + 11: EmbeddedPdv, + 12: UTF8String, + 13: RelativeOid, + 16: Sequence, + 17: Set, + 18: NumericString, + 19: PrintableString, + 20: TeletexString, + 21: VideotexString, + 22: IA5String, + 23: UTCTime, + 24: GeneralizedTime, + 25: GraphicString, + 26: VisibleString, + 27: GeneralString, + 28: UniversalString, + 29: CharacterString, + 30: BMPString +} + + +def _build(class_, method, tag, header, contents, trailer, spec=None, spec_params=None, nested_spec=None): + """ + Builds an Asn1Value object generically, or using a spec with optional params + + :param class_: + An integer representing the ASN.1 class + + :param method: + An integer representing the ASN.1 method + + :param tag: + An integer representing the ASN.1 tag + + :param header: + A byte string of the ASN.1 header (class, method, tag, length) + + :param contents: + A byte string of the ASN.1 value + + :param trailer: + A byte string of any ASN.1 trailer (only used by indefinite length encodings) + + :param spec: + A class derived from Asn1Value that defines what class_ and tag the + value should have, and the semantics of the encoded value. The + return value will be of this type. If omitted, the encoded value + will be decoded using the standard universal tag based on the + encoded tag number. + + :param spec_params: + A dict of params to pass to the spec object + + :param nested_spec: + For certain Asn1Value classes (such as OctetString and BitString), the + contents can be further parsed and interpreted as another Asn1Value. + This parameter controls the spec for that sub-parsing. 
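The retagging handled by _fix_tagging() and _build_id_tuple() above is visible directly in encodings; a sketch of the plain, implicit and explicit forms of the same value:

    import binascii
    from asn1crypto.core import Integer

    print(binascii.hexlify(Integer(5).dump()))                   # 020105
    # Implicit tagging replaces the universal tag (context class, tag 0)
    print(binascii.hexlify(Integer(5, implicit=(2, 0)).dump()))  # 800105
    # Explicit tagging wraps the value in an outer constructed tag
    print(binascii.hexlify(Integer(5, explicit=(2, 0)).dump()))  # a003020105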
+ + :return: + An object of the type spec, or if not specified, a child of Asn1Value + """ + + if spec_params is not None: + _tag_type_to_explicit_implicit(spec_params) + + if header is None: + return VOID + + header_set = False + + # If an explicit specification was passed in, make sure it matches + if spec is not None: + # If there is explicit tagging and contents, we have to split + # the header and trailer off before we do the parsing + no_explicit = spec_params and 'no_explicit' in spec_params + if not no_explicit and (spec.explicit or (spec_params and 'explicit' in spec_params)): + if spec_params: + value = spec(**spec_params) + else: + value = spec() + original_explicit = value.explicit + explicit_info = reversed(original_explicit) + parsed_class = class_ + parsed_method = method + parsed_tag = tag + to_parse = contents + explicit_header = header + explicit_trailer = trailer or b'' + for expected_class, expected_tag in explicit_info: + if parsed_class != expected_class: + raise ValueError(unwrap( + ''' + Error parsing %s - explicitly-tagged class should have been + %s, but %s was found + ''', + type_name(value), + CLASS_NUM_TO_NAME_MAP.get(expected_class), + CLASS_NUM_TO_NAME_MAP.get(parsed_class, parsed_class) + )) + if parsed_method != 1: + raise ValueError(unwrap( + ''' + Error parsing %s - explicitly-tagged method should have + been %s, but %s was found + ''', + type_name(value), + METHOD_NUM_TO_NAME_MAP.get(1), + METHOD_NUM_TO_NAME_MAP.get(parsed_method, parsed_method) + )) + if parsed_tag != expected_tag: + raise ValueError(unwrap( + ''' + Error parsing %s - explicitly-tagged tag should have been + %s, but %s was found + ''', + type_name(value), + expected_tag, + parsed_tag + )) + info, _ = _parse(to_parse, len(to_parse)) + parsed_class, parsed_method, parsed_tag, parsed_header, to_parse, parsed_trailer = info + explicit_header += parsed_header + explicit_trailer = parsed_trailer + explicit_trailer + + value = _build(*info, spec=spec, spec_params={'no_explicit': True}) + value._header = explicit_header + value._trailer = explicit_trailer + value.explicit = original_explicit + header_set = True + else: + if spec_params: + value = spec(contents=contents, **spec_params) + else: + value = spec(contents=contents) + + if spec is Any: + pass + + elif isinstance(value, Choice): + value.validate(class_, tag, contents) + try: + # Force parsing the Choice now + value.contents = header + value.contents + header = b'' + value.parse() + except (ValueError, TypeError) as e: + args = e.args[1:] + e.args = (e.args[0] + '\n while parsing %s' % type_name(value),) + args + raise e + + else: + if class_ != value.class_: + raise ValueError(unwrap( + ''' + Error parsing %s - class should have been %s, but %s was + found + ''', + type_name(value), + CLASS_NUM_TO_NAME_MAP.get(value.class_), + CLASS_NUM_TO_NAME_MAP.get(class_, class_) + )) + if method != value.method: + # Allow parsing a primitive method as constructed if the value + # is indefinite length. This is to allow parsing BER. 
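When no spec is passed, _build() falls back to the _UNIVERSAL_SPECS table keyed by the encoded tag number; a sketch assuming the module-level load() helper from the earlier, unshown part of this file:

    from asn1crypto import core

    val = core.load(b'\x0c\x02hi')          # universal tag 12 -> UTF8String
    print(type(val).__name__, val.native)   # UTF8String hi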
+ ber_indef = method == 1 and value.method == 0 and trailer == b'\x00\x00' + if not ber_indef or not isinstance(value, Constructable): + raise ValueError(unwrap( + ''' + Error parsing %s - method should have been %s, but %s was found + ''', + type_name(value), + METHOD_NUM_TO_NAME_MAP.get(value.method), + METHOD_NUM_TO_NAME_MAP.get(method, method) + )) + else: + value.method = method + value._indefinite = True + if tag != value.tag and tag != value._bad_tag: + raise ValueError(unwrap( + ''' + Error parsing %s - tag should have been %s, but %s was found + ''', + type_name(value), + value.tag, + tag + )) + + # For explicitly tagged, un-speced parsings, we use a generic container + # since we will be parsing the contents and discarding the outer object + # anyway a little further on + elif spec_params and 'explicit' in spec_params: + original_value = Asn1Value(contents=contents, **spec_params) + original_explicit = original_value.explicit + + to_parse = contents + explicit_header = header + explicit_trailer = trailer or b'' + for expected_class, expected_tag in reversed(original_explicit): + info, _ = _parse(to_parse, len(to_parse)) + _, _, _, parsed_header, to_parse, parsed_trailer = info + explicit_header += parsed_header + explicit_trailer = parsed_trailer + explicit_trailer + value = _build(*info, spec=spec, spec_params={'no_explicit': True}) + value._header = header + value._header + value._trailer += trailer or b'' + value.explicit = original_explicit + header_set = True + + # If no spec was specified, allow anything and just process what + # is in the input data + else: + if tag not in _UNIVERSAL_SPECS: + raise ValueError(unwrap( + ''' + Unknown element - %s class, %s method, tag %s + ''', + CLASS_NUM_TO_NAME_MAP.get(class_), + METHOD_NUM_TO_NAME_MAP.get(method), + tag + )) + + spec = _UNIVERSAL_SPECS[tag] + + value = spec(contents=contents, class_=class_) + ber_indef = method == 1 and value.method == 0 and trailer == b'\x00\x00' + if ber_indef and isinstance(value, Constructable): + value._indefinite = True + value.method = method + + if not header_set: + value._header = header + value._trailer = trailer or b'' + + # Destroy any default value that our contents have overwritten + value._native = None + + if nested_spec: + try: + value.parse(nested_spec) + except (ValueError, TypeError) as e: + args = e.args[1:] + e.args = (e.args[0] + '\n while parsing %s' % type_name(value),) + args + raise e + + return value + + +def _parse_build(encoded_data, pointer=0, spec=None, spec_params=None, strict=False): + """ + Parses a byte string generically, or using a spec with optional params + + :param encoded_data: + A byte string that contains BER-encoded data + + :param pointer: + The index in the byte string to parse from + + :param spec: + A class derived from Asn1Value that defines what class_ and tag the + value should have, and the semantics of the encoded value. The + return value will be of this type. If omitted, the encoded value + will be decoded using the standard universal tag based on the + encoded tag number. 
+ + :param spec_params: + A dict of params to pass to the spec object + + :param strict: + A boolean indicating if trailing data should be forbidden - if so, a + ValueError will be raised when trailing data exists + + :return: + A 2-element tuple: + - 0: An object of the type spec, or if not specified, a child of Asn1Value + - 1: An integer indicating how many bytes were consumed + """ + + encoded_len = len(encoded_data) + info, new_pointer = _parse(encoded_data, encoded_len, pointer) + if strict and new_pointer != pointer + encoded_len: + extra_bytes = pointer + encoded_len - new_pointer + raise ValueError('Extra data - %d bytes of trailing data were provided' % extra_bytes) + return (_build(*info, spec=spec, spec_params=spec_params), new_pointer) diff --git a/venv/lib/python2.7/site-packages/asn1crypto/crl.py b/venv/lib/python2.7/site-packages/asn1crypto/crl.py new file mode 100644 index 0000000..84cb168 --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/crl.py @@ -0,0 +1,536 @@ +# coding: utf-8 + +""" +ASN.1 type classes for certificate revocation lists (CRL). Exports the +following items: + + - CertificateList() + +Other type classes are defined that help compose the types listed above. +""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +import hashlib + +from .algos import SignedDigestAlgorithm +from .core import ( + Boolean, + Enumerated, + GeneralizedTime, + Integer, + ObjectIdentifier, + OctetBitString, + ParsableOctetString, + Sequence, + SequenceOf, +) +from .x509 import ( + AuthorityInfoAccessSyntax, + AuthorityKeyIdentifier, + CRLDistributionPoints, + DistributionPointName, + GeneralNames, + Name, + ReasonFlags, + Time, +) + + +# The structures in this file are taken from https://tools.ietf.org/html/rfc5280 + + +class Version(Integer): + _map = { + 0: 'v1', + 1: 'v2', + 2: 'v3', + } + + +class IssuingDistributionPoint(Sequence): + _fields = [ + ('distribution_point', DistributionPointName, {'explicit': 0, 'optional': True}), + ('only_contains_user_certs', Boolean, {'implicit': 1, 'default': False}), + ('only_contains_ca_certs', Boolean, {'implicit': 2, 'default': False}), + ('only_some_reasons', ReasonFlags, {'implicit': 3, 'optional': True}), + ('indirect_crl', Boolean, {'implicit': 4, 'default': False}), + ('only_contains_attribute_certs', Boolean, {'implicit': 5, 'default': False}), + ] + + +class TBSCertListExtensionId(ObjectIdentifier): + _map = { + '2.5.29.18': 'issuer_alt_name', + '2.5.29.20': 'crl_number', + '2.5.29.27': 'delta_crl_indicator', + '2.5.29.28': 'issuing_distribution_point', + '2.5.29.35': 'authority_key_identifier', + '2.5.29.46': 'freshest_crl', + '1.3.6.1.5.5.7.1.1': 'authority_information_access', + } + + +class TBSCertListExtension(Sequence): + _fields = [ + ('extn_id', TBSCertListExtensionId), + ('critical', Boolean, {'default': False}), + ('extn_value', ParsableOctetString), + ] + + _oid_pair = ('extn_id', 'extn_value') + _oid_specs = { + 'issuer_alt_name': GeneralNames, + 'crl_number': Integer, + 'delta_crl_indicator': Integer, + 'issuing_distribution_point': IssuingDistributionPoint, + 'authority_key_identifier': AuthorityKeyIdentifier, + 'freshest_crl': CRLDistributionPoints, + 'authority_information_access': AuthorityInfoAccessSyntax, + } + + +class TBSCertListExtensions(SequenceOf): + _child_spec = TBSCertListExtension + + +class CRLReason(Enumerated): + _map = { + 0: 'unspecified', + 1: 'key_compromise', + 2: 'ca_compromise', + 3: 'affiliation_changed', + 4: 'superseded', + 5: 
'cessation_of_operation',
+ 6: 'certificate_hold',
+ 8: 'remove_from_crl',
+ 9: 'privilege_withdrawn',
+ 10: 'aa_compromise',
+ }
+
+ @property
+ def human_friendly(self):
+ """
+ :return:
+ A unicode string with revocation description that is suitable to
+ show to end-users. Starts with a lower case letter and is phrased
+ in such a way that it makes sense after the phrase "because of" or
+ "due to".
+ """
+
+ return {
+ 'unspecified': 'an unspecified reason',
+ 'key_compromise': 'a compromised key',
+ 'ca_compromise': 'the CA being compromised',
+ 'affiliation_changed': 'an affiliation change',
+ 'superseded': 'certificate supersession',
+ 'cessation_of_operation': 'a cessation of operation',
+ 'certificate_hold': 'a certificate hold',
+ 'remove_from_crl': 'removal from the CRL',
+ 'privilege_withdrawn': 'privilege withdrawal',
+ 'aa_compromise': 'the AA being compromised',
+ }[self.native]
+
+
+class CRLEntryExtensionId(ObjectIdentifier):
+ _map = {
+ '2.5.29.21': 'crl_reason',
+ '2.5.29.23': 'hold_instruction_code',
+ '2.5.29.24': 'invalidity_date',
+ '2.5.29.29': 'certificate_issuer',
+ }
+
+
+class CRLEntryExtension(Sequence):
+ _fields = [
+ ('extn_id', CRLEntryExtensionId),
+ ('critical', Boolean, {'default': False}),
+ ('extn_value', ParsableOctetString),
+ ]
+
+ _oid_pair = ('extn_id', 'extn_value')
+ _oid_specs = {
+ 'crl_reason': CRLReason,
+ 'hold_instruction_code': ObjectIdentifier,
+ 'invalidity_date': GeneralizedTime,
+ 'certificate_issuer': GeneralNames,
+ }
+
+
+class CRLEntryExtensions(SequenceOf):
+ _child_spec = CRLEntryExtension
+
+
+class RevokedCertificate(Sequence):
+ _fields = [
+ ('user_certificate', Integer),
+ ('revocation_date', Time),
+ ('crl_entry_extensions', CRLEntryExtensions, {'optional': True}),
+ ]
+
+ _processed_extensions = False
+ _critical_extensions = None
+ _crl_reason_value = None
+ _invalidity_date_value = None
+ _certificate_issuer_value = None
+ _issuer_name = False
+
+ def _set_extensions(self):
+ """
+ Sets common named extensions to private attributes and creates a list
+ of critical extensions
+ """
+
+ self._critical_extensions = set()
+
+ for extension in self['crl_entry_extensions']:
+ name = extension['extn_id'].native
+ attribute_name = '_%s_value' % name
+ if hasattr(self, attribute_name):
+ setattr(self, attribute_name, extension['extn_value'].parsed)
+ if extension['critical'].native:
+ self._critical_extensions.add(name)
+
+ self._processed_extensions = True
+
+ @property
+ def critical_extensions(self):
+ """
+ Returns a set of the names (or OID if not a known extension) of the
+ extensions marked as critical
+
+ :return:
+ A set of unicode strings
+ """
+
+ if not self._processed_extensions:
+ self._set_extensions()
+ return self._critical_extensions
+
+ @property
+ def crl_reason_value(self):
+ """
+ This extension indicates the reason that a certificate was revoked.
+
+ :return:
+ None or a CRLReason object
+ """
+
+ if self._processed_extensions is False:
+ self._set_extensions()
+ return self._crl_reason_value
+
+ @property
+ def invalidity_date_value(self):
+ """
+ This extension indicates the suspected date/time the private key was
+ compromised or the certificate became invalid. This would usually be
+ before the revocation date, which is when the CA processed the
+ revocation.
+ + :return: + None or a GeneralizedTime object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._invalidity_date_value + + @property + def certificate_issuer_value(self): + """ + This extension indicates the issuer of the certificate in question, + and is used in indirect CRLs. CRL entries without this extension are + for certificates issued from the last seen issuer. + + :return: + None or an x509.GeneralNames object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._certificate_issuer_value + + @property + def issuer_name(self): + """ + :return: + None, or an asn1crypto.x509.Name object for the issuer of the cert + """ + + if self._issuer_name is False: + self._issuer_name = None + if self.certificate_issuer_value: + for general_name in self.certificate_issuer_value: + if general_name.name == 'directory_name': + self._issuer_name = general_name.chosen + break + return self._issuer_name + + +class RevokedCertificates(SequenceOf): + _child_spec = RevokedCertificate + + +class TbsCertList(Sequence): + _fields = [ + ('version', Version, {'optional': True}), + ('signature', SignedDigestAlgorithm), + ('issuer', Name), + ('this_update', Time), + ('next_update', Time, {'optional': True}), + ('revoked_certificates', RevokedCertificates, {'optional': True}), + ('crl_extensions', TBSCertListExtensions, {'explicit': 0, 'optional': True}), + ] + + +class CertificateList(Sequence): + _fields = [ + ('tbs_cert_list', TbsCertList), + ('signature_algorithm', SignedDigestAlgorithm), + ('signature', OctetBitString), + ] + + _processed_extensions = False + _critical_extensions = None + _issuer_alt_name_value = None + _crl_number_value = None + _delta_crl_indicator_value = None + _issuing_distribution_point_value = None + _authority_key_identifier_value = None + _freshest_crl_value = None + _authority_information_access_value = None + _issuer_cert_urls = None + _delta_crl_distribution_points = None + _sha1 = None + _sha256 = None + + def _set_extensions(self): + """ + Sets common named extensions to private attributes and creates a list + of critical extensions + """ + + self._critical_extensions = set() + + for extension in self['tbs_cert_list']['crl_extensions']: + name = extension['extn_id'].native + attribute_name = '_%s_value' % name + if hasattr(self, attribute_name): + setattr(self, attribute_name, extension['extn_value'].parsed) + if extension['critical'].native: + self._critical_extensions.add(name) + + self._processed_extensions = True + + @property + def critical_extensions(self): + """ + Returns a set of the names (or OID if not a known extension) of the + extensions marked as critical + + :return: + A set of unicode strings + """ + + if not self._processed_extensions: + self._set_extensions() + return self._critical_extensions + + @property + def issuer_alt_name_value(self): + """ + This extension allows associating one or more alternative names with + the issuer of the CRL. + + :return: + None or an x509.GeneralNames object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._issuer_alt_name_value + + @property + def crl_number_value(self): + """ + This extension adds a monotonically increasing number to the CRL and is + used to distinguish different versions of the CRL. 
+ + :return: + None or an Integer object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._crl_number_value + + @property + def delta_crl_indicator_value(self): + """ + This extension indicates a CRL is a delta CRL, and contains the CRL + number of the base CRL that it is a delta from. + + :return: + None or an Integer object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._delta_crl_indicator_value + + @property + def issuing_distribution_point_value(self): + """ + This extension includes information about what types of revocations + and certificates are part of the CRL. + + :return: + None or an IssuingDistributionPoint object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._issuing_distribution_point_value + + @property + def authority_key_identifier_value(self): + """ + This extension helps in identifying the public key with which to + validate the authenticity of the CRL. + + :return: + None or an AuthorityKeyIdentifier object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._authority_key_identifier_value + + @property + def freshest_crl_value(self): + """ + This extension is used in complete CRLs to indicate where a delta CRL + may be located. + + :return: + None or a CRLDistributionPoints object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._freshest_crl_value + + @property + def authority_information_access_value(self): + """ + This extension is used to provide a URL with which to download the + certificate used to sign this CRL. + + :return: + None or an AuthorityInfoAccessSyntax object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._authority_information_access_value + + @property + def issuer(self): + """ + :return: + An asn1crypto.x509.Name object for the issuer of the CRL + """ + + return self['tbs_cert_list']['issuer'] + + @property + def authority_key_identifier(self): + """ + :return: + None or a byte string of the key_identifier from the authority key + identifier extension + """ + + if not self.authority_key_identifier_value: + return None + + return self.authority_key_identifier_value['key_identifier'].native + + @property + def issuer_cert_urls(self): + """ + :return: + A list of unicode strings that are URLs that should contain either + an individual DER-encoded X.509 certificate, or a DER-encoded CMS + message containing multiple certificates + """ + + if self._issuer_cert_urls is None: + self._issuer_cert_urls = [] + if self.authority_information_access_value: + for entry in self.authority_information_access_value: + if entry['access_method'].native == 'ca_issuers': + location = entry['access_location'] + if location.name != 'uniform_resource_identifier': + continue + url = location.native + if url.lower()[0:7] == 'http://': + self._issuer_cert_urls.append(url) + return self._issuer_cert_urls + + @property + def delta_crl_distribution_points(self): + """ + Returns delta CRL URLs - only applies to complete CRLs + + :return: + A list of zero or more DistributionPoint objects + """ + + if self._delta_crl_distribution_points is None: + self._delta_crl_distribution_points = [] + + if self.freshest_crl_value is not None: + for distribution_point in self.freshest_crl_value: + distribution_point_name = distribution_point['distribution_point'] + # RFC 5280 indicates conforming CA should not use the relative form + if 
distribution_point_name.name == 'name_relative_to_crl_issuer':
+ continue
+ # This library is currently only concerned with HTTP-based CRLs
+ for general_name in distribution_point_name.chosen:
+ if general_name.name == 'uniform_resource_identifier':
+ self._delta_crl_distribution_points.append(distribution_point)
+
+ return self._delta_crl_distribution_points
+
+ @property
+ def signature(self):
+ """
+ :return:
+ A byte string of the signature
+ """
+
+ return self['signature'].native
+
+ @property
+ def sha1(self):
+ """
+ :return:
+ The SHA1 hash of the DER-encoded bytes of this certificate list
+ """
+
+ if self._sha1 is None:
+ self._sha1 = hashlib.sha1(self.dump()).digest()
+ return self._sha1
+
+ @property
+ def sha256(self):
+ """
+ :return:
+ The SHA-256 hash of the DER-encoded bytes of this certificate list
+ """
+
+ if self._sha256 is None:
+ self._sha256 = hashlib.sha256(self.dump()).digest()
+ return self._sha256
diff --git a/venv/lib/python2.7/site-packages/asn1crypto/csr.py b/venv/lib/python2.7/site-packages/asn1crypto/csr.py
new file mode 100644
index 0000000..7ea2848
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/asn1crypto/csr.py
@@ -0,0 +1,96 @@
+# coding: utf-8
+
+"""
+ASN.1 type classes for certificate signing requests (CSR). Exports the
+following items:
+
+ - CertificationRequest()
+
+Other type classes are defined that help compose the types listed above.
+"""
+
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+from .algos import SignedDigestAlgorithm
+from .core import (
+ Any,
+ Integer,
+ ObjectIdentifier,
+ OctetBitString,
+ Sequence,
+ SetOf,
+)
+from .keys import PublicKeyInfo
+from .x509 import DirectoryString, Extensions, Name
+
+
+# The structures in this file are taken from https://tools.ietf.org/html/rfc2986
+# and https://tools.ietf.org/html/rfc2985
+
+
+class Version(Integer):
+ _map = {
+ 0: 'v1',
+ }
+
+
+class CSRAttributeType(ObjectIdentifier):
+ _map = {
+ '1.2.840.113549.1.9.7': 'challenge_password',
+ '1.2.840.113549.1.9.9': 'extended_certificate_attributes',
+ '1.2.840.113549.1.9.14': 'extension_request',
+ }
+
+
+class SetOfDirectoryString(SetOf):
+ _child_spec = DirectoryString
+
+
+class Attribute(Sequence):
+ _fields = [
+ ('type', ObjectIdentifier),
+ ('values', SetOf, {'spec': Any}),
+ ]
+
+
+class SetOfAttributes(SetOf):
+ _child_spec = Attribute
+
+
+class SetOfExtensions(SetOf):
+ _child_spec = Extensions
+
+
+class CRIAttribute(Sequence):
+ _fields = [
+ ('type', CSRAttributeType),
+ ('values', Any),
+ ]
+
+ _oid_pair = ('type', 'values')
+ _oid_specs = {
+ 'challenge_password': SetOfDirectoryString,
+ 'extended_certificate_attributes': SetOfAttributes,
+ 'extension_request': SetOfExtensions,
+ }
+
+
+class CRIAttributes(SetOf):
+ _child_spec = CRIAttribute
+
+
+class CertificationRequestInfo(Sequence):
+ _fields = [
+ ('version', Version),
+ ('subject', Name),
+ ('subject_pk_info', PublicKeyInfo),
+ ('attributes', CRIAttributes, {'implicit': 0, 'optional': True}),
+ ]
+
+
+class CertificationRequest(Sequence):
+ _fields = [
+ ('certification_request_info', CertificationRequestInfo),
+ ('signature_algorithm', SignedDigestAlgorithm),
+ ('signature', OctetBitString),
+ ]
diff --git a/venv/lib/python2.7/site-packages/asn1crypto/keys.py b/venv/lib/python2.7/site-packages/asn1crypto/keys.py
new file mode 100644
index 0000000..9a09a31
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/asn1crypto/keys.py
@@ -0,0 +1,1249 @@
+# coding: utf-8
+
+"""
+ASN.1 type classes for public and private keys.
Exports the following items:
+
+ - DSAPrivateKey()
+ - ECPrivateKey()
+ - EncryptedPrivateKeyInfo()
+ - PrivateKeyInfo()
+ - PublicKeyInfo()
+ - RSAPrivateKey()
+ - RSAPublicKey()
+
+Other type classes are defined that help compose the types listed above.
+"""
+
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+import hashlib
+import math
+
+from ._elliptic_curve import (
+ SECP192R1_BASE_POINT,
+ SECP224R1_BASE_POINT,
+ SECP256R1_BASE_POINT,
+ SECP384R1_BASE_POINT,
+ SECP521R1_BASE_POINT,
+ PrimeCurve,
+ PrimePoint,
+)
+from ._errors import unwrap
+from ._types import type_name, str_cls, byte_cls
+from .algos import _ForceNullParameters, DigestAlgorithm, EncryptionAlgorithm, RSAESOAEPParams
+from .core import (
+ Any,
+ Asn1Value,
+ BitString,
+ Choice,
+ Integer,
+ IntegerOctetString,
+ Null,
+ ObjectIdentifier,
+ OctetBitString,
+ OctetString,
+ ParsableOctetString,
+ ParsableOctetBitString,
+ Sequence,
+ SequenceOf,
+ SetOf,
+)
+from .util import int_from_bytes, int_to_bytes
+
+
+class OtherPrimeInfo(Sequence):
+ """
+ Source: https://tools.ietf.org/html/rfc3447#page-46
+ """
+
+ _fields = [
+ ('prime', Integer),
+ ('exponent', Integer),
+ ('coefficient', Integer),
+ ]
+
+
+class OtherPrimeInfos(SequenceOf):
+ """
+ Source: https://tools.ietf.org/html/rfc3447#page-46
+ """
+
+ _child_spec = OtherPrimeInfo
+
+
+class RSAPrivateKeyVersion(Integer):
+ """
+ Original Name: Version
+ Source: https://tools.ietf.org/html/rfc3447#page-45
+ """
+
+ _map = {
+ 0: 'two-prime',
+ 1: 'multi',
+ }
+
+
+class RSAPrivateKey(Sequence):
+ """
+ Source: https://tools.ietf.org/html/rfc3447#page-45
+ """
+
+ _fields = [
+ ('version', RSAPrivateKeyVersion),
+ ('modulus', Integer),
+ ('public_exponent', Integer),
+ ('private_exponent', Integer),
+ ('prime1', Integer),
+ ('prime2', Integer),
+ ('exponent1', Integer),
+ ('exponent2', Integer),
+ ('coefficient', Integer),
+ ('other_prime_infos', OtherPrimeInfos, {'optional': True})
+ ]
+
+
+class RSAPublicKey(Sequence):
+ """
+ Source: https://tools.ietf.org/html/rfc3447#page-44
+ """
+
+ _fields = [
+ ('modulus', Integer),
+ ('public_exponent', Integer)
+ ]
+
+
+class DSAPrivateKey(Sequence):
+ """
+ The ASN.1 structure that OpenSSL uses to store a DSA private key that is
+ not part of a PKCS#8 structure. Reverse engineered from the
+ English-language description on the linked OpenSSL documentation page.
+
+ Original Name: None
+ Source: https://www.openssl.org/docs/apps/dsa.html
+ """
+
+ _fields = [
+ ('version', Integer),
+ ('p', Integer),
+ ('q', Integer),
+ ('g', Integer),
+ ('public_key', Integer),
+ ('private_key', Integer),
+ ]
+
+
+class _ECPoint():
+ """
+ In both PublicKeyInfo and PrivateKeyInfo, the EC public key is a byte
+ string that is encoded as a bit string. This class adds convenience
+ methods for converting between the byte string and a pair of integers
+ that are the X and Y coordinates.
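+
+ As an editorial illustration (not part of the original docs): the
+ uncompressed form handled by to_coords() is a leading 0x04 byte
+ followed by the X and Y coordinates as equal-width big-endian
+ integers, so a point on a 256-bit curve occupies 65 bytes
+ (1 + 32 + 32).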
+ """ + + @classmethod + def from_coords(cls, x, y): + """ + Creates an ECPoint object from the X and Y integer coordinates of the + point + + :param x: + The X coordinate, as an integer + + :param y: + The Y coordinate, as an integer + + :return: + An ECPoint object + """ + + x_bytes = int(math.ceil(math.log(x, 2) / 8.0)) + y_bytes = int(math.ceil(math.log(y, 2) / 8.0)) + + num_bytes = max(x_bytes, y_bytes) + + byte_string = b'\x04' + byte_string += int_to_bytes(x, width=num_bytes) + byte_string += int_to_bytes(y, width=num_bytes) + + return cls(byte_string) + + def to_coords(self): + """ + Returns the X and Y coordinates for this EC point, as native Python + integers + + :return: + A 2-element tuple containing integers (X, Y) + """ + + data = self.native + first_byte = data[0:1] + + # Uncompressed + if first_byte == b'\x04': + remaining = data[1:] + field_len = len(remaining) // 2 + x = int_from_bytes(remaining[0:field_len]) + y = int_from_bytes(remaining[field_len:]) + return (x, y) + + if first_byte not in set([b'\x02', b'\x03']): + raise ValueError(unwrap( + ''' + Invalid EC public key - first byte is incorrect + ''' + )) + + raise ValueError(unwrap( + ''' + Compressed representations of EC public keys are not supported due + to patent US6252960 + ''' + )) + + +class ECPoint(OctetString, _ECPoint): + + pass + + +class ECPointBitString(OctetBitString, _ECPoint): + + pass + + +class SpecifiedECDomainVersion(Integer): + """ + Source: http://www.secg.org/sec1-v2.pdf page 104 + """ + _map = { + 1: 'ecdpVer1', + 2: 'ecdpVer2', + 3: 'ecdpVer3', + } + + +class FieldType(ObjectIdentifier): + """ + Original Name: None + Source: http://www.secg.org/sec1-v2.pdf page 101 + """ + + _map = { + '1.2.840.10045.1.1': 'prime_field', + '1.2.840.10045.1.2': 'characteristic_two_field', + } + + +class CharacteristicTwoBasis(ObjectIdentifier): + """ + Original Name: None + Source: http://www.secg.org/sec1-v2.pdf page 102 + """ + + _map = { + '1.2.840.10045.1.2.1.1': 'gn_basis', + '1.2.840.10045.1.2.1.2': 'tp_basis', + '1.2.840.10045.1.2.1.3': 'pp_basis', + } + + +class Pentanomial(Sequence): + """ + Source: http://www.secg.org/sec1-v2.pdf page 102 + """ + + _fields = [ + ('k1', Integer), + ('k2', Integer), + ('k3', Integer), + ] + + +class CharacteristicTwo(Sequence): + """ + Original Name: Characteristic-two + Source: http://www.secg.org/sec1-v2.pdf page 101 + """ + + _fields = [ + ('m', Integer), + ('basis', CharacteristicTwoBasis), + ('parameters', Any), + ] + + _oid_pair = ('basis', 'parameters') + _oid_specs = { + 'gn_basis': Null, + 'tp_basis': Integer, + 'pp_basis': Pentanomial, + } + + +class FieldID(Sequence): + """ + Source: http://www.secg.org/sec1-v2.pdf page 100 + """ + + _fields = [ + ('field_type', FieldType), + ('parameters', Any), + ] + + _oid_pair = ('field_type', 'parameters') + _oid_specs = { + 'prime_field': Integer, + 'characteristic_two_field': CharacteristicTwo, + } + + +class Curve(Sequence): + """ + Source: http://www.secg.org/sec1-v2.pdf page 104 + """ + + _fields = [ + ('a', OctetString), + ('b', OctetString), + ('seed', OctetBitString, {'optional': True}), + ] + + +class SpecifiedECDomain(Sequence): + """ + Source: http://www.secg.org/sec1-v2.pdf page 103 + """ + + _fields = [ + ('version', SpecifiedECDomainVersion), + ('field_id', FieldID), + ('curve', Curve), + ('base', ECPoint), + ('order', Integer), + ('cofactor', Integer, {'optional': True}), + ('hash', DigestAlgorithm, {'optional': True}), + ] + + +class NamedCurve(ObjectIdentifier): + """ + Various named curves + + 
Original Name: None + Source: https://tools.ietf.org/html/rfc3279#page-23, + https://tools.ietf.org/html/rfc5480#page-5 + """ + + _map = { + # https://tools.ietf.org/html/rfc3279#page-23 + '1.2.840.10045.3.0.1': 'c2pnb163v1', + '1.2.840.10045.3.0.2': 'c2pnb163v2', + '1.2.840.10045.3.0.3': 'c2pnb163v3', + '1.2.840.10045.3.0.4': 'c2pnb176w1', + '1.2.840.10045.3.0.5': 'c2tnb191v1', + '1.2.840.10045.3.0.6': 'c2tnb191v2', + '1.2.840.10045.3.0.7': 'c2tnb191v3', + '1.2.840.10045.3.0.8': 'c2onb191v4', + '1.2.840.10045.3.0.9': 'c2onb191v5', + '1.2.840.10045.3.0.10': 'c2pnb208w1', + '1.2.840.10045.3.0.11': 'c2tnb239v1', + '1.2.840.10045.3.0.12': 'c2tnb239v2', + '1.2.840.10045.3.0.13': 'c2tnb239v3', + '1.2.840.10045.3.0.14': 'c2onb239v4', + '1.2.840.10045.3.0.15': 'c2onb239v5', + '1.2.840.10045.3.0.16': 'c2pnb272w1', + '1.2.840.10045.3.0.17': 'c2pnb304w1', + '1.2.840.10045.3.0.18': 'c2tnb359v1', + '1.2.840.10045.3.0.19': 'c2pnb368w1', + '1.2.840.10045.3.0.20': 'c2tnb431r1', + '1.2.840.10045.3.1.2': 'prime192v2', + '1.2.840.10045.3.1.3': 'prime192v3', + '1.2.840.10045.3.1.4': 'prime239v1', + '1.2.840.10045.3.1.5': 'prime239v2', + '1.2.840.10045.3.1.6': 'prime239v3', + # https://tools.ietf.org/html/rfc5480#page-5 + '1.3.132.0.1': 'sect163k1', + '1.3.132.0.15': 'sect163r2', + '1.2.840.10045.3.1.1': 'secp192r1', + '1.3.132.0.33': 'secp224r1', + '1.3.132.0.26': 'sect233k1', + '1.2.840.10045.3.1.7': 'secp256r1', + '1.3.132.0.27': 'sect233r1', + '1.3.132.0.16': 'sect283k1', + '1.3.132.0.17': 'sect283r1', + '1.3.132.0.34': 'secp384r1', + '1.3.132.0.36': 'sect409k1', + '1.3.132.0.37': 'sect409r1', + '1.3.132.0.35': 'secp521r1', + '1.3.132.0.38': 'sect571k1', + '1.3.132.0.39': 'sect571r1', + } + + +class ECDomainParameters(Choice): + """ + Source: http://www.secg.org/sec1-v2.pdf page 102 + """ + + _alternatives = [ + ('specified', SpecifiedECDomain), + ('named', NamedCurve), + ('implicit_ca', Null), + ] + + +class ECPrivateKeyVersion(Integer): + """ + Original Name: None + Source: http://www.secg.org/sec1-v2.pdf page 108 + """ + + _map = { + 1: 'ecPrivkeyVer1', + } + + +class ECPrivateKey(Sequence): + """ + Source: http://www.secg.org/sec1-v2.pdf page 108 + """ + + _fields = [ + ('version', ECPrivateKeyVersion), + ('private_key', IntegerOctetString), + ('parameters', ECDomainParameters, {'explicit': 0, 'optional': True}), + ('public_key', ECPointBitString, {'explicit': 1, 'optional': True}), + ] + + +class DSAParams(Sequence): + """ + Parameters for a DSA public or private key + + Original Name: Dss-Parms + Source: https://tools.ietf.org/html/rfc3279#page-9 + """ + + _fields = [ + ('p', Integer), + ('q', Integer), + ('g', Integer), + ] + + +class Attribute(Sequence): + """ + Source: https://www.itu.int/rec/dologin_pub.asp?lang=e&id=T-REC-X.501-198811-S!!PDF-E&type=items page 8 + """ + + _fields = [ + ('type', ObjectIdentifier), + ('values', SetOf, {'spec': Any}), + ] + + +class Attributes(SetOf): + """ + Source: https://tools.ietf.org/html/rfc5208#page-3 + """ + + _child_spec = Attribute + + +class PrivateKeyAlgorithmId(ObjectIdentifier): + """ + These OIDs for various public keys are reused when storing private keys + inside of a PKCS#8 structure + + Original Name: None + Source: https://tools.ietf.org/html/rfc3279 + """ + + _map = { + # https://tools.ietf.org/html/rfc3279#page-19 + '1.2.840.113549.1.1.1': 'rsa', + # https://tools.ietf.org/html/rfc3279#page-18 + '1.2.840.10040.4.1': 'dsa', + # https://tools.ietf.org/html/rfc3279#page-13 + '1.2.840.10045.2.1': 'ec', + } + + +class 
PrivateKeyAlgorithm(_ForceNullParameters, Sequence): + """ + Original Name: PrivateKeyAlgorithmIdentifier + Source: https://tools.ietf.org/html/rfc5208#page-3 + """ + + _fields = [ + ('algorithm', PrivateKeyAlgorithmId), + ('parameters', Any, {'optional': True}), + ] + + _oid_pair = ('algorithm', 'parameters') + _oid_specs = { + 'dsa': DSAParams, + 'ec': ECDomainParameters, + } + + +class PrivateKeyInfo(Sequence): + """ + Source: https://tools.ietf.org/html/rfc5208#page-3 + """ + + _fields = [ + ('version', Integer), + ('private_key_algorithm', PrivateKeyAlgorithm), + ('private_key', ParsableOctetString), + ('attributes', Attributes, {'implicit': 0, 'optional': True}), + ] + + def _private_key_spec(self): + algorithm = self['private_key_algorithm']['algorithm'].native + return { + 'rsa': RSAPrivateKey, + 'dsa': Integer, + 'ec': ECPrivateKey, + }[algorithm] + + _spec_callbacks = { + 'private_key': _private_key_spec + } + + _algorithm = None + _bit_size = None + _public_key = None + _fingerprint = None + + @classmethod + def wrap(cls, private_key, algorithm): + """ + Wraps a private key in a PrivateKeyInfo structure + + :param private_key: + A byte string or Asn1Value object of the private key + + :param algorithm: + A unicode string of "rsa", "dsa" or "ec" + + :return: + A PrivateKeyInfo object + """ + + if not isinstance(private_key, byte_cls) and not isinstance(private_key, Asn1Value): + raise TypeError(unwrap( + ''' + private_key must be a byte string or Asn1Value, not %s + ''', + type_name(private_key) + )) + + if algorithm == 'rsa': + if not isinstance(private_key, RSAPrivateKey): + private_key = RSAPrivateKey.load(private_key) + params = Null() + elif algorithm == 'dsa': + if not isinstance(private_key, DSAPrivateKey): + private_key = DSAPrivateKey.load(private_key) + params = DSAParams() + params['p'] = private_key['p'] + params['q'] = private_key['q'] + params['g'] = private_key['g'] + public_key = private_key['public_key'] + private_key = private_key['private_key'] + elif algorithm == 'ec': + if not isinstance(private_key, ECPrivateKey): + private_key = ECPrivateKey.load(private_key) + else: + private_key = private_key.copy() + params = private_key['parameters'] + del private_key['parameters'] + else: + raise ValueError(unwrap( + ''' + algorithm must be one of "rsa", "dsa", "ec", not %s + ''', + repr(algorithm) + )) + + private_key_algo = PrivateKeyAlgorithm() + private_key_algo['algorithm'] = PrivateKeyAlgorithmId(algorithm) + private_key_algo['parameters'] = params + + container = cls() + container._algorithm = algorithm + container['version'] = Integer(0) + container['private_key_algorithm'] = private_key_algo + container['private_key'] = private_key + + # Here we save the DSA public key if possible since it is not contained + # within the PKCS#8 structure for a DSA key + if algorithm == 'dsa': + container._public_key = public_key + + return container + + def _compute_public_key(self): + """ + Computes the public key corresponding to the current private key. + + :return: + For RSA keys, an RSAPublicKey object. For DSA keys, an Integer + object. For EC keys, an ECPointBitString. 
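+
+        Editorial note: for DSA this is the textbook relation
+        y = g**x mod p (computed below with the three-argument pow()),
+        and for EC it is the scalar multiplication of the curve's base
+        point by the private key.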
+ """ + + if self.algorithm == 'dsa': + params = self['private_key_algorithm']['parameters'] + return Integer(pow( + params['g'].native, + self['private_key'].parsed.native, + params['p'].native + )) + + if self.algorithm == 'rsa': + key = self['private_key'].parsed + return RSAPublicKey({ + 'modulus': key['modulus'], + 'public_exponent': key['public_exponent'], + }) + + if self.algorithm == 'ec': + curve_type, details = self.curve + + if curve_type == 'implicit_ca': + raise ValueError(unwrap( + ''' + Unable to compute public key for EC key using Implicit CA + parameters + ''' + )) + + if curve_type == 'specified': + if details['field_id']['field_type'] == 'characteristic_two_field': + raise ValueError(unwrap( + ''' + Unable to compute public key for EC key over a + characteristic two field + ''' + )) + + curve = PrimeCurve( + details['field_id']['parameters'], + int_from_bytes(details['curve']['a']), + int_from_bytes(details['curve']['b']) + ) + base_x, base_y = self['private_key_algorithm']['parameters'].chosen['base'].to_coords() + base_point = PrimePoint(curve, base_x, base_y) + + elif curve_type == 'named': + if details not in ('secp192r1', 'secp224r1', 'secp256r1', 'secp384r1', 'secp521r1'): + raise ValueError(unwrap( + ''' + Unable to compute public key for EC named curve %s, + parameters not currently included + ''', + details + )) + + base_point = { + 'secp192r1': SECP192R1_BASE_POINT, + 'secp224r1': SECP224R1_BASE_POINT, + 'secp256r1': SECP256R1_BASE_POINT, + 'secp384r1': SECP384R1_BASE_POINT, + 'secp521r1': SECP521R1_BASE_POINT, + }[details] + + public_point = base_point * self['private_key'].parsed['private_key'].native + return ECPointBitString.from_coords(public_point.x, public_point.y) + + def unwrap(self): + """ + Unwraps the private key into an RSAPrivateKey, DSAPrivateKey or + ECPrivateKey object + + :return: + An RSAPrivateKey, DSAPrivateKey or ECPrivateKey object + """ + + if self.algorithm == 'rsa': + return self['private_key'].parsed + + if self.algorithm == 'dsa': + params = self['private_key_algorithm']['parameters'] + return DSAPrivateKey({ + 'version': 0, + 'p': params['p'], + 'q': params['q'], + 'g': params['g'], + 'public_key': self.public_key, + 'private_key': self['private_key'].parsed, + }) + + if self.algorithm == 'ec': + output = self['private_key'].parsed + output['parameters'] = self['private_key_algorithm']['parameters'] + output['public_key'] = self.public_key + return output + + @property + def curve(self): + """ + Returns information about the curve used for an EC key + + :raises: + ValueError - when the key is not an EC key + + :return: + A two-element tuple, with the first element being a unicode string + of "implicit_ca", "specified" or "named". If the first element is + "implicit_ca", the second is None. If "specified", the second is + an OrderedDict that is the native version of SpecifiedECDomain. If + "named", the second is a unicode string of the curve name. 
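+
+        For example (an editorial illustration): a key generated on the
+        NIST P-256 curve would typically return ('named', 'secp256r1').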
+ """ + + if self.algorithm != 'ec': + raise ValueError(unwrap( + ''' + Only EC keys have a curve, this key is %s + ''', + self.algorithm.upper() + )) + + params = self['private_key_algorithm']['parameters'] + chosen = params.chosen + + if params.name == 'implicit_ca': + value = None + else: + value = chosen.native + + return (params.name, value) + + @property + def hash_algo(self): + """ + Returns the name of the family of hash algorithms used to generate a + DSA key + + :raises: + ValueError - when the key is not a DSA key + + :return: + A unicode string of "sha1" or "sha2" + """ + + if self.algorithm != 'dsa': + raise ValueError(unwrap( + ''' + Only DSA keys are generated using a hash algorithm, this key is + %s + ''', + self.algorithm.upper() + )) + + byte_len = math.log(self['private_key_algorithm']['parameters']['q'].native, 2) / 8 + + return 'sha1' if byte_len <= 20 else 'sha2' + + @property + def algorithm(self): + """ + :return: + A unicode string of "rsa", "dsa" or "ec" + """ + + if self._algorithm is None: + self._algorithm = self['private_key_algorithm']['algorithm'].native + return self._algorithm + + @property + def bit_size(self): + """ + :return: + The bit size of the private key, as an integer + """ + + if self._bit_size is None: + if self.algorithm == 'rsa': + prime = self['private_key'].parsed['modulus'].native + elif self.algorithm == 'dsa': + prime = self['private_key_algorithm']['parameters']['p'].native + elif self.algorithm == 'ec': + prime = self['private_key'].parsed['private_key'].native + self._bit_size = int(math.ceil(math.log(prime, 2))) + modulus = self._bit_size % 8 + if modulus != 0: + self._bit_size += 8 - modulus + return self._bit_size + + @property + def byte_size(self): + """ + :return: + The byte size of the private key, as an integer + """ + + return int(math.ceil(self.bit_size / 8)) + + @property + def public_key(self): + """ + :return: + If an RSA key, an RSAPublicKey object. If a DSA key, an Integer + object. If an EC key, an ECPointBitString object. + """ + + if self._public_key is None: + if self.algorithm == 'ec': + key = self['private_key'].parsed + if key['public_key']: + self._public_key = key['public_key'].untag() + else: + self._public_key = self._compute_public_key() + else: + self._public_key = self._compute_public_key() + + return self._public_key + + @property + def public_key_info(self): + """ + :return: + A PublicKeyInfo object derived from this private key. + """ + + return PublicKeyInfo({ + 'algorithm': { + 'algorithm': self.algorithm, + 'parameters': self['private_key_algorithm']['parameters'] + }, + 'public_key': self.public_key + }) + + @property + def fingerprint(self): + """ + Creates a fingerprint that can be compared with a public key to see if + the two form a pair. + + This fingerprint is not compatible with fingerprints generated by any + other software. 
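+        As an editorial illustration: for an RSA key the hash input is
+        the string 'modulus:public_exponent' (both as decimal integers),
+        as built in the code below.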
+ + :return: + A byte string that is a sha256 hash of selected components (based + on the key type) + """ + + if self._fingerprint is None: + params = self['private_key_algorithm']['parameters'] + key = self['private_key'].parsed + + if self.algorithm == 'rsa': + to_hash = '%d:%d' % ( + key['modulus'].native, + key['public_exponent'].native, + ) + + elif self.algorithm == 'dsa': + public_key = self.public_key + to_hash = '%d:%d:%d:%d' % ( + params['p'].native, + params['q'].native, + params['g'].native, + public_key.native, + ) + + elif self.algorithm == 'ec': + public_key = key['public_key'].native + if public_key is None: + public_key = self.public_key.native + + if params.name == 'named': + to_hash = '%s:' % params.chosen.native + to_hash = to_hash.encode('utf-8') + to_hash += public_key + + elif params.name == 'implicit_ca': + to_hash = public_key + + elif params.name == 'specified': + to_hash = '%s:' % params.chosen['field_id']['parameters'].native + to_hash = to_hash.encode('utf-8') + to_hash += b':' + params.chosen['curve']['a'].native + to_hash += b':' + params.chosen['curve']['b'].native + to_hash += public_key + + if isinstance(to_hash, str_cls): + to_hash = to_hash.encode('utf-8') + + self._fingerprint = hashlib.sha256(to_hash).digest() + + return self._fingerprint + + +class EncryptedPrivateKeyInfo(Sequence): + """ + Source: https://tools.ietf.org/html/rfc5208#page-4 + """ + + _fields = [ + ('encryption_algorithm', EncryptionAlgorithm), + ('encrypted_data', OctetString), + ] + + +# These structures are from https://tools.ietf.org/html/rfc3279 + +class ValidationParms(Sequence): + """ + Source: https://tools.ietf.org/html/rfc3279#page-10 + """ + + _fields = [ + ('seed', BitString), + ('pgen_counter', Integer), + ] + + +class DomainParameters(Sequence): + """ + Source: https://tools.ietf.org/html/rfc3279#page-10 + """ + + _fields = [ + ('p', Integer), + ('g', Integer), + ('q', Integer), + ('j', Integer, {'optional': True}), + ('validation_params', ValidationParms, {'optional': True}), + ] + + +class PublicKeyAlgorithmId(ObjectIdentifier): + """ + Original Name: None + Source: https://tools.ietf.org/html/rfc3279 + """ + + _map = { + # https://tools.ietf.org/html/rfc3279#page-19 + '1.2.840.113549.1.1.1': 'rsa', + # https://tools.ietf.org/html/rfc3447#page-47 + '1.2.840.113549.1.1.7': 'rsaes_oaep', + # https://tools.ietf.org/html/rfc3279#page-18 + '1.2.840.10040.4.1': 'dsa', + # https://tools.ietf.org/html/rfc3279#page-13 + '1.2.840.10045.2.1': 'ec', + # https://tools.ietf.org/html/rfc3279#page-10 + '1.2.840.10046.2.1': 'dh', + } + + +class PublicKeyAlgorithm(_ForceNullParameters, Sequence): + """ + Original Name: AlgorithmIdentifier + Source: https://tools.ietf.org/html/rfc5280#page-18 + """ + + _fields = [ + ('algorithm', PublicKeyAlgorithmId), + ('parameters', Any, {'optional': True}), + ] + + _oid_pair = ('algorithm', 'parameters') + _oid_specs = { + 'dsa': DSAParams, + 'ec': ECDomainParameters, + 'dh': DomainParameters, + 'rsaes_oaep': RSAESOAEPParams, + } + + +class PublicKeyInfo(Sequence): + """ + Original Name: SubjectPublicKeyInfo + Source: https://tools.ietf.org/html/rfc5280#page-17 + """ + + _fields = [ + ('algorithm', PublicKeyAlgorithm), + ('public_key', ParsableOctetBitString), + ] + + def _public_key_spec(self): + algorithm = self['algorithm']['algorithm'].native + return { + 'rsa': RSAPublicKey, + 'rsaes_oaep': RSAPublicKey, + 'dsa': Integer, + # We override the field spec with ECPoint so that users can easily + # decompose the byte string into the constituent X and 
Y coords
+ 'ec': (ECPointBitString, None),
+ 'dh': Integer,
+ }[algorithm]
+
+ _spec_callbacks = {
+ 'public_key': _public_key_spec
+ }
+
+ _algorithm = None
+ _bit_size = None
+ _fingerprint = None
+ _sha1 = None
+ _sha256 = None
+
+ @classmethod
+ def wrap(cls, public_key, algorithm):
+ """
+ Wraps a public key in a PublicKeyInfo structure
+
+ :param public_key:
+ A byte string or Asn1Value object of the public key
+
+ :param algorithm:
+ A unicode string of "rsa"
+
+ :return:
+ A PublicKeyInfo object
+ """
+
+ if not isinstance(public_key, byte_cls) and not isinstance(public_key, Asn1Value):
+ raise TypeError(unwrap(
+ '''
+ public_key must be a byte string or Asn1Value, not %s
+ ''',
+ type_name(public_key)
+ ))
+
+ if algorithm != 'rsa':
+ raise ValueError(unwrap(
+ '''
+ algorithm must be "rsa", not %s
+ ''',
+ repr(algorithm)
+ ))
+
+ algo = PublicKeyAlgorithm()
+ algo['algorithm'] = PublicKeyAlgorithmId(algorithm)
+ algo['parameters'] = Null()
+
+ container = cls()
+ container['algorithm'] = algo
+ if isinstance(public_key, Asn1Value):
+ public_key = public_key.untag().dump()
+ container['public_key'] = ParsableOctetBitString(public_key)
+
+ return container
+
+ def unwrap(self):
+ """
+ Unwraps an RSA public key into an RSAPublicKey object. Does not support
+ DSA or EC public keys since they do not have an unwrapped form.
+
+ :return:
+ An RSAPublicKey object
+ """
+
+ if self.algorithm == 'rsa':
+ return self['public_key'].parsed
+
+ key_type = self.algorithm.upper()
+ a_an = 'an' if key_type == 'EC' else 'a'
+ raise ValueError(unwrap(
+ '''
+ Only RSA public keys may be unwrapped - this key is %s %s public
+ key
+ ''',
+ a_an,
+ key_type
+ ))
+
+ @property
+ def curve(self):
+ """
+ Returns information about the curve used for an EC key
+
+ :raises:
+ ValueError - when the key is not an EC key
+
+ :return:
+ A two-element tuple, with the first element being a unicode string
+ of "implicit_ca", "specified" or "named". If the first element is
+ "implicit_ca", the second is None. If "specified", the second is
+ an OrderedDict that is the native version of SpecifiedECDomain. If
+ "named", the second is a unicode string of the curve name.
+ """ + + if self.algorithm != 'ec': + raise ValueError(unwrap( + ''' + Only EC keys have a curve, this key is %s + ''', + self.algorithm.upper() + )) + + params = self['algorithm']['parameters'] + chosen = params.chosen + + if params.name == 'implicit_ca': + value = None + else: + value = chosen.native + + return (params.name, value) + + @property + def hash_algo(self): + """ + Returns the name of the family of hash algorithms used to generate a + DSA key + + :raises: + ValueError - when the key is not a DSA key + + :return: + A unicode string of "sha1" or "sha2" or None if no parameters are + present + """ + + if self.algorithm != 'dsa': + raise ValueError(unwrap( + ''' + Only DSA keys are generated using a hash algorithm, this key is + %s + ''', + self.algorithm.upper() + )) + + parameters = self['algorithm']['parameters'] + if parameters.native is None: + return None + + byte_len = math.log(parameters['q'].native, 2) / 8 + + return 'sha1' if byte_len <= 20 else 'sha2' + + @property + def algorithm(self): + """ + :return: + A unicode string of "rsa", "dsa" or "ec" + """ + + if self._algorithm is None: + self._algorithm = self['algorithm']['algorithm'].native + return self._algorithm + + @property + def bit_size(self): + """ + :return: + The bit size of the public key, as an integer + """ + + if self._bit_size is None: + if self.algorithm == 'ec': + self._bit_size = ((len(self['public_key'].native) - 1) / 2) * 8 + else: + if self.algorithm == 'rsa': + prime = self['public_key'].parsed['modulus'].native + elif self.algorithm == 'dsa': + prime = self['algorithm']['parameters']['p'].native + self._bit_size = int(math.ceil(math.log(prime, 2))) + modulus = self._bit_size % 8 + if modulus != 0: + self._bit_size += 8 - modulus + + return self._bit_size + + @property + def byte_size(self): + """ + :return: + The byte size of the public key, as an integer + """ + + return int(math.ceil(self.bit_size / 8)) + + @property + def sha1(self): + """ + :return: + The SHA1 hash of the DER-encoded bytes of this public key info + """ + + if self._sha1 is None: + self._sha1 = hashlib.sha1(byte_cls(self['public_key'])).digest() + return self._sha1 + + @property + def sha256(self): + """ + :return: + The SHA-256 hash of the DER-encoded bytes of this public key info + """ + + if self._sha256 is None: + self._sha256 = hashlib.sha256(byte_cls(self['public_key'])).digest() + return self._sha256 + + @property + def fingerprint(self): + """ + Creates a fingerprint that can be compared with a private key to see if + the two form a pair. + + This fingerprint is not compatible with fingerprints generated by any + other software. 
+ + :return: + A byte string that is a sha256 hash of selected components (based + on the key type) + """ + + if self._fingerprint is None: + key_type = self['algorithm']['algorithm'].native + params = self['algorithm']['parameters'] + + if key_type == 'rsa': + key = self['public_key'].parsed + to_hash = '%d:%d' % ( + key['modulus'].native, + key['public_exponent'].native, + ) + + elif key_type == 'dsa': + key = self['public_key'].parsed + to_hash = '%d:%d:%d:%d' % ( + params['p'].native, + params['q'].native, + params['g'].native, + key.native, + ) + + elif key_type == 'ec': + key = self['public_key'] + + if params.name == 'named': + to_hash = '%s:' % params.chosen.native + to_hash = to_hash.encode('utf-8') + to_hash += key.native + + elif params.name == 'implicit_ca': + to_hash = key.native + + elif params.name == 'specified': + to_hash = '%s:' % params.chosen['field_id']['parameters'].native + to_hash = to_hash.encode('utf-8') + to_hash += b':' + params.chosen['curve']['a'].native + to_hash += b':' + params.chosen['curve']['b'].native + to_hash += key.native + + if isinstance(to_hash, str_cls): + to_hash = to_hash.encode('utf-8') + + self._fingerprint = hashlib.sha256(to_hash).digest() + + return self._fingerprint diff --git a/venv/lib/python2.7/site-packages/asn1crypto/ocsp.py b/venv/lib/python2.7/site-packages/asn1crypto/ocsp.py new file mode 100644 index 0000000..f18d8e8 --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/ocsp.py @@ -0,0 +1,652 @@ +# coding: utf-8 + +""" +ASN.1 type classes for the online certificate status protocol (OCSP). Exports +the following items: + + - OCSPRequest() + - OCSPResponse() + +Other type classes are defined that help compose the types listed above. +""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +from .algos import DigestAlgorithm, SignedDigestAlgorithm +from .core import ( + Boolean, + Choice, + Enumerated, + GeneralizedTime, + IA5String, + Integer, + Null, + ObjectIdentifier, + OctetBitString, + OctetString, + ParsableOctetString, + Sequence, + SequenceOf, +) +from .crl import AuthorityInfoAccessSyntax, CRLReason +from .keys import PublicKeyAlgorithm +from .x509 import Certificate, GeneralName, GeneralNames, Name + + +# The structures in this file are taken from https://tools.ietf.org/html/rfc6960 + + +class Version(Integer): + _map = { + 0: 'v1' + } + + +class CertId(Sequence): + _fields = [ + ('hash_algorithm', DigestAlgorithm), + ('issuer_name_hash', OctetString), + ('issuer_key_hash', OctetString), + ('serial_number', Integer), + ] + + +class ServiceLocator(Sequence): + _fields = [ + ('issuer', Name), + ('locator', AuthorityInfoAccessSyntax), + ] + + +class RequestExtensionId(ObjectIdentifier): + _map = { + '1.3.6.1.5.5.7.48.1.7': 'service_locator', + } + + +class RequestExtension(Sequence): + _fields = [ + ('extn_id', RequestExtensionId), + ('critical', Boolean, {'default': False}), + ('extn_value', ParsableOctetString), + ] + + _oid_pair = ('extn_id', 'extn_value') + _oid_specs = { + 'service_locator': ServiceLocator, + } + + +class RequestExtensions(SequenceOf): + _child_spec = RequestExtension + + +class Request(Sequence): + _fields = [ + ('req_cert', CertId), + ('single_request_extensions', RequestExtensions, {'explicit': 0, 'optional': True}), + ] + + _processed_extensions = False + _critical_extensions = None + _service_locator_value = None + + def _set_extensions(self): + """ + Sets common named extensions to private attributes and creates a list + of critical extensions + """ + 
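+        # Editorial note (a hedged illustration, not from the original
+        # source): an extension whose extn_id decodes to 'service_locator'
+        # ends up stored on self._service_locator_value via the
+        # '_%s_value' attribute_name pattern built below.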
+ self._critical_extensions = set() + + for extension in self['single_request_extensions']: + name = extension['extn_id'].native + attribute_name = '_%s_value' % name + if hasattr(self, attribute_name): + setattr(self, attribute_name, extension['extn_value'].parsed) + if extension['critical'].native: + self._critical_extensions.add(name) + + self._processed_extensions = True + + @property + def critical_extensions(self): + """ + Returns a set of the names (or OID if not a known extension) of the + extensions marked as critical + + :return: + A set of unicode strings + """ + + if not self._processed_extensions: + self._set_extensions() + return self._critical_extensions + + @property + def service_locator_value(self): + """ + This extension is used when communicating with an OCSP responder that + acts as a proxy for OCSP requests + + :return: + None or a ServiceLocator object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._service_locator_value + + +class Requests(SequenceOf): + _child_spec = Request + + +class ResponseType(ObjectIdentifier): + _map = { + '1.3.6.1.5.5.7.48.1.1': 'basic_ocsp_response', + } + + +class AcceptableResponses(SequenceOf): + _child_spec = ResponseType + + +class PreferredSignatureAlgorithm(Sequence): + _fields = [ + ('sig_identifier', SignedDigestAlgorithm), + ('cert_identifier', PublicKeyAlgorithm, {'optional': True}), + ] + + +class PreferredSignatureAlgorithms(SequenceOf): + _child_spec = PreferredSignatureAlgorithm + + +class TBSRequestExtensionId(ObjectIdentifier): + _map = { + '1.3.6.1.5.5.7.48.1.2': 'nonce', + '1.3.6.1.5.5.7.48.1.4': 'acceptable_responses', + '1.3.6.1.5.5.7.48.1.8': 'preferred_signature_algorithms', + } + + +class TBSRequestExtension(Sequence): + _fields = [ + ('extn_id', TBSRequestExtensionId), + ('critical', Boolean, {'default': False}), + ('extn_value', ParsableOctetString), + ] + + _oid_pair = ('extn_id', 'extn_value') + _oid_specs = { + 'nonce': OctetString, + 'acceptable_responses': AcceptableResponses, + 'preferred_signature_algorithms': PreferredSignatureAlgorithms, + } + + +class TBSRequestExtensions(SequenceOf): + _child_spec = TBSRequestExtension + + +class TBSRequest(Sequence): + _fields = [ + ('version', Version, {'explicit': 0, 'default': 'v1'}), + ('requestor_name', GeneralName, {'explicit': 1, 'optional': True}), + ('request_list', Requests), + ('request_extensions', TBSRequestExtensions, {'explicit': 2, 'optional': True}), + ] + + +class Certificates(SequenceOf): + _child_spec = Certificate + + +class Signature(Sequence): + _fields = [ + ('signature_algorithm', SignedDigestAlgorithm), + ('signature', OctetBitString), + ('certs', Certificates, {'explicit': 0, 'optional': True}), + ] + + +class OCSPRequest(Sequence): + _fields = [ + ('tbs_request', TBSRequest), + ('optional_signature', Signature, {'explicit': 0, 'optional': True}), + ] + + _processed_extensions = False + _critical_extensions = None + _nonce_value = None + _acceptable_responses_value = None + _preferred_signature_algorithms_value = None + + def _set_extensions(self): + """ + Sets common named extensions to private attributes and creates a list + of critical extensions + """ + + self._critical_extensions = set() + + for extension in self['tbs_request']['request_extensions']: + name = extension['extn_id'].native + attribute_name = '_%s_value' % name + if hasattr(self, attribute_name): + setattr(self, attribute_name, extension['extn_value'].parsed) + if extension['critical'].native: + self._critical_extensions.add(name) + 
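+        # Editorial note: after this loop, e.g. the 'nonce' extension
+        # (OID 1.3.6.1.5.5.7.48.1.2) is available as self._nonce_value.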
+ self._processed_extensions = True + + @property + def critical_extensions(self): + """ + Returns a set of the names (or OID if not a known extension) of the + extensions marked as critical + + :return: + A set of unicode strings + """ + + if not self._processed_extensions: + self._set_extensions() + return self._critical_extensions + + @property + def nonce_value(self): + """ + This extension is used to prevent replay attacks by including a unique, + random value with each request/response pair + + :return: + None or an OctetString object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._nonce_value + + @property + def acceptable_responses_value(self): + """ + This extension is used to allow the client and server to communicate + with alternative response formats other than just basic_ocsp_response, + although no other formats are defined in the standard. + + :return: + None or an AcceptableResponses object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._acceptable_responses_value + + @property + def preferred_signature_algorithms_value(self): + """ + This extension is used by the client to define what signature algorithms + are preferred, including both the hash algorithm and the public key + algorithm, with a level of detail down to even the public key algorithm + parameters, such as curve name. + + :return: + None or a PreferredSignatureAlgorithms object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._preferred_signature_algorithms_value + + +class OCSPResponseStatus(Enumerated): + _map = { + 0: 'successful', + 1: 'malformed_request', + 2: 'internal_error', + 3: 'try_later', + 5: 'sign_required', + 6: 'unauthorized', + } + + +class ResponderId(Choice): + _alternatives = [ + ('by_name', Name, {'explicit': 1}), + ('by_key', OctetString, {'explicit': 2}), + ] + + +class RevokedInfo(Sequence): + _fields = [ + ('revocation_time', GeneralizedTime), + ('revocation_reason', CRLReason, {'explicit': 0, 'optional': True}), + ] + + +class CertStatus(Choice): + _alternatives = [ + ('good', Null, {'implicit': 0}), + ('revoked', RevokedInfo, {'implicit': 1}), + ('unknown', Null, {'implicit': 2}), + ] + + +class CrlId(Sequence): + _fields = [ + ('crl_url', IA5String, {'explicit': 0, 'optional': True}), + ('crl_num', Integer, {'explicit': 1, 'optional': True}), + ('crl_time', GeneralizedTime, {'explicit': 2, 'optional': True}), + ] + + +class SingleResponseExtensionId(ObjectIdentifier): + _map = { + '1.3.6.1.5.5.7.48.1.3': 'crl', + '1.3.6.1.5.5.7.48.1.6': 'archive_cutoff', + # These are CRLEntryExtension values from + # https://tools.ietf.org/html/rfc5280 + '2.5.29.21': 'crl_reason', + '2.5.29.24': 'invalidity_date', + '2.5.29.29': 'certificate_issuer', + # https://tools.ietf.org/html/rfc6962.html#page-13 + '1.3.6.1.4.1.11129.2.4.5': 'signed_certificate_timestamp_list', + } + + +class SingleResponseExtension(Sequence): + _fields = [ + ('extn_id', SingleResponseExtensionId), + ('critical', Boolean, {'default': False}), + ('extn_value', ParsableOctetString), + ] + + _oid_pair = ('extn_id', 'extn_value') + _oid_specs = { + 'crl': CrlId, + 'archive_cutoff': GeneralizedTime, + 'crl_reason': CRLReason, + 'invalidity_date': GeneralizedTime, + 'certificate_issuer': GeneralNames, + 'signed_certificate_timestamp_list': OctetString, + } + + +class SingleResponseExtensions(SequenceOf): + _child_spec = SingleResponseExtension + + +class SingleResponse(Sequence): + _fields = [ + ('cert_id', 
CertId), + ('cert_status', CertStatus), + ('this_update', GeneralizedTime), + ('next_update', GeneralizedTime, {'explicit': 0, 'optional': True}), + ('single_extensions', SingleResponseExtensions, {'explicit': 1, 'optional': True}), + ] + + _processed_extensions = False + _critical_extensions = None + _crl_value = None + _archive_cutoff_value = None + _crl_reason_value = None + _invalidity_date_value = None + _certificate_issuer_value = None + + def _set_extensions(self): + """ + Sets common named extensions to private attributes and creates a list + of critical extensions + """ + + self._critical_extensions = set() + + for extension in self['single_extensions']: + name = extension['extn_id'].native + attribute_name = '_%s_value' % name + if hasattr(self, attribute_name): + setattr(self, attribute_name, extension['extn_value'].parsed) + if extension['critical'].native: + self._critical_extensions.add(name) + + self._processed_extensions = True + + @property + def critical_extensions(self): + """ + Returns a set of the names (or OID if not a known extension) of the + extensions marked as critical + + :return: + A set of unicode strings + """ + + if not self._processed_extensions: + self._set_extensions() + return self._critical_extensions + + @property + def crl_value(self): + """ + This extension is used to locate the CRL that a certificate's revocation + is contained within. + + :return: + None or a CrlId object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._crl_value + + @property + def archive_cutoff_value(self): + """ + This extension is used to indicate the date at which an archived + (historical) certificate status entry will no longer be available. + + :return: + None or a GeneralizedTime object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._archive_cutoff_value + + @property + def crl_reason_value(self): + """ + This extension indicates the reason that a certificate was revoked. + + :return: + None or a CRLReason object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._crl_reason_value + + @property + def invalidity_date_value(self): + """ + This extension indicates the suspected date/time the private key was + compromised or the certificate became invalid. This would usually be + before the revocation date, which is when the CA processed the + revocation. + + :return: + None or a GeneralizedTime object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._invalidity_date_value + + @property + def certificate_issuer_value(self): + """ + This extension indicates the issuer of the certificate in question. 
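+        This is the CRL entry extension from RFC 5280, carried over into OCSP
+        single responses; it is typically only seen when the revocation
+        information was sourced from an indirect CRL.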
+ + :return: + None or an x509.GeneralNames object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._certificate_issuer_value + + +class Responses(SequenceOf): + _child_spec = SingleResponse + + +class ResponseDataExtensionId(ObjectIdentifier): + _map = { + '1.3.6.1.5.5.7.48.1.2': 'nonce', + '1.3.6.1.5.5.7.48.1.9': 'extended_revoke', + } + + +class ResponseDataExtension(Sequence): + _fields = [ + ('extn_id', ResponseDataExtensionId), + ('critical', Boolean, {'default': False}), + ('extn_value', ParsableOctetString), + ] + + _oid_pair = ('extn_id', 'extn_value') + _oid_specs = { + 'nonce': OctetString, + 'extended_revoke': Null, + } + + +class ResponseDataExtensions(SequenceOf): + _child_spec = ResponseDataExtension + + +class ResponseData(Sequence): + _fields = [ + ('version', Version, {'explicit': 0, 'default': 'v1'}), + ('responder_id', ResponderId), + ('produced_at', GeneralizedTime), + ('responses', Responses), + ('response_extensions', ResponseDataExtensions, {'explicit': 1, 'optional': True}), + ] + + +class BasicOCSPResponse(Sequence): + _fields = [ + ('tbs_response_data', ResponseData), + ('signature_algorithm', SignedDigestAlgorithm), + ('signature', OctetBitString), + ('certs', Certificates, {'explicit': 0, 'optional': True}), + ] + + +class ResponseBytes(Sequence): + _fields = [ + ('response_type', ResponseType), + ('response', ParsableOctetString), + ] + + _oid_pair = ('response_type', 'response') + _oid_specs = { + 'basic_ocsp_response': BasicOCSPResponse, + } + + +class OCSPResponse(Sequence): + _fields = [ + ('response_status', OCSPResponseStatus), + ('response_bytes', ResponseBytes, {'explicit': 0, 'optional': True}), + ] + + _processed_extensions = False + _critical_extensions = None + _nonce_value = None + _extended_revoke_value = None + + def _set_extensions(self): + """ + Sets common named extensions to private attributes and creates a list + of critical extensions + """ + + self._critical_extensions = set() + + for extension in self['response_bytes']['response'].parsed['tbs_response_data']['response_extensions']: + name = extension['extn_id'].native + attribute_name = '_%s_value' % name + if hasattr(self, attribute_name): + setattr(self, attribute_name, extension['extn_value'].parsed) + if extension['critical'].native: + self._critical_extensions.add(name) + + self._processed_extensions = True + + @property + def critical_extensions(self): + """ + Returns a set of the names (or OID if not a known extension) of the + extensions marked as critical + + :return: + A set of unicode strings + """ + + if not self._processed_extensions: + self._set_extensions() + return self._critical_extensions + + @property + def nonce_value(self): + """ + This extension is used to prevent replay attacks on the request/response + exchange + + :return: + None or an OctetString object + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._nonce_value + + @property + def extended_revoke_value(self): + """ + This extension is used to signal that the responder will return a + "revoked" status for non-issued certificates. 
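+        Per RFC 6960, the presence of the extension alone carries the
+        meaning; the extension value is always NULL.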
+ + :return: + None or a Null object (if present) + """ + + if self._processed_extensions is False: + self._set_extensions() + return self._extended_revoke_value + + @property + def basic_ocsp_response(self): + """ + A shortcut into the BasicOCSPResponse sequence + + :return: + None or an asn1crypto.ocsp.BasicOCSPResponse object + """ + + return self['response_bytes']['response'].parsed + + @property + def response_data(self): + """ + A shortcut into the parsed, ResponseData sequence + + :return: + None or an asn1crypto.ocsp.ResponseData object + """ + + return self['response_bytes']['response'].parsed['tbs_response_data'] diff --git a/venv/lib/python2.7/site-packages/asn1crypto/parser.py b/venv/lib/python2.7/site-packages/asn1crypto/parser.py new file mode 100644 index 0000000..07f53ab --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/parser.py @@ -0,0 +1,289 @@ +# coding: utf-8 + +""" +Functions for parsing and dumping using the ASN.1 DER encoding. Exports the +following items: + + - emit() + - parse() + - peek() + +Other type classes are defined that help compose the types listed above. +""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +import sys + +from ._types import byte_cls, chr_cls, type_name +from .util import int_from_bytes, int_to_bytes + +_PY2 = sys.version_info <= (3,) +_INSUFFICIENT_DATA_MESSAGE = 'Insufficient data - %s bytes requested but only %s available' + + +def emit(class_, method, tag, contents): + """ + Constructs a byte string of an ASN.1 DER-encoded value + + This is typically not useful. Instead, use one of the standard classes from + asn1crypto.core, or construct a new class with specific fields, and call the + .dump() method. + + :param class_: + An integer ASN.1 class value: 0 (universal), 1 (application), + 2 (context), 3 (private) + + :param method: + An integer ASN.1 method value: 0 (primitive), 1 (constructed) + + :param tag: + An integer ASN.1 tag value + + :param contents: + A byte string of the encoded byte contents + + :return: + A byte string of the ASN.1 DER value (header and contents) + """ + + if not isinstance(class_, int): + raise TypeError('class_ must be an integer, not %s' % type_name(class_)) + + if class_ < 0 or class_ > 3: + raise ValueError('class_ must be one of 0, 1, 2 or 3, not %s' % class_) + + if not isinstance(method, int): + raise TypeError('method must be an integer, not %s' % type_name(method)) + + if method < 0 or method > 1: + raise ValueError('method must be 0 or 1, not %s' % method) + + if not isinstance(tag, int): + raise TypeError('tag must be an integer, not %s' % type_name(tag)) + + if tag < 0: + raise ValueError('tag must be greater than zero, not %s' % tag) + + if not isinstance(contents, byte_cls): + raise TypeError('contents must be a byte string, not %s' % type_name(contents)) + + return _dump_header(class_, method, tag, contents) + contents + + +def parse(contents, strict=False): + """ + Parses a byte string of ASN.1 BER/DER-encoded data. + + This is typically not useful. Instead, use one of the standard classes from + asn1crypto.core, or construct a new class with specific fields, and call the + .load() class method. 
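+
+    A minimal example, using the DER encoding of INTEGER 0:
+
+        parse(b'\x02\x01\x00')  # -> (0, 0, 2, b'\x02\x01', b'\x00', b'')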
+ + :param contents: + A byte string of BER/DER-encoded data + + :param strict: + A boolean indicating if trailing data should be forbidden - if so, a + ValueError will be raised when trailing data exists + + :raises: + ValueError - when the contents do not contain an ASN.1 header or are truncated in some way + TypeError - when contents is not a byte string + + :return: + A 6-element tuple: + - 0: integer class (0 to 3) + - 1: integer method + - 2: integer tag + - 3: byte string header + - 4: byte string content + - 5: byte string trailer + """ + + if not isinstance(contents, byte_cls): + raise TypeError('contents must be a byte string, not %s' % type_name(contents)) + + contents_len = len(contents) + info, consumed = _parse(contents, contents_len) + if strict and consumed != contents_len: + raise ValueError('Extra data - %d bytes of trailing data were provided' % (contents_len - consumed)) + return info + + +def peek(contents): + """ + Parses a byte string of ASN.1 BER/DER-encoded data to find the length + + This is typically used to look into an encoded value to see how long the + next chunk of ASN.1-encoded data is. Primarily it is useful when a + value is a concatenation of multiple values. + + :param contents: + A byte string of BER/DER-encoded data + + :raises: + ValueError - when the contents do not contain an ASN.1 header or are truncated in some way + TypeError - when contents is not a byte string + + :return: + An integer with the number of bytes occupied by the ASN.1 value + """ + + if not isinstance(contents, byte_cls): + raise TypeError('contents must be a byte string, not %s' % type_name(contents)) + + info, consumed = _parse(contents, len(contents)) + return consumed + + +def _parse(encoded_data, data_len, pointer=0, lengths_only=False): + """ + Parses a byte string into component parts + + :param encoded_data: + A byte string that contains BER-encoded data + + :param data_len: + The integer length of the encoded data + + :param pointer: + The index in the byte string to parse from + + :param lengths_only: + A boolean to cause the call to return a 2-element tuple of the integer + number of bytes in the header and the integer number of bytes in the + contents. Internal use only. + + :return: + A 2-element tuple: + - 0: A tuple of (class_, method, tag, header, content, trailer) + - 1: An integer indicating how many bytes were consumed + """ + + if data_len < pointer + 2: + raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (2, data_len - pointer)) + + start = pointer + first_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer] + pointer += 1 + + tag = first_octet & 31 + # Base 128 length using 8th bit as continuation indicator + if tag == 31: + tag = 0 + while True: + num = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer] + pointer += 1 + tag *= 128 + tag += num & 127 + if num >> 7 == 0: + break + + length_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer] + pointer += 1 + + if length_octet >> 7 == 0: + if lengths_only: + return (pointer, pointer + (length_octet & 127)) + contents_end = pointer + (length_octet & 127) + + else: + length_octets = length_octet & 127 + if length_octets: + pointer += length_octets + contents_end = pointer + int_from_bytes(encoded_data[pointer - length_octets:pointer], signed=False) + if lengths_only: + return (pointer, contents_end) + + else: + # To properly parse indefinite length values, we need to scan forward + # parsing headers until we find a value with a length of zero. 
If we + # just scanned looking for \x00\x00, nested indefinite length values + # would not work. + contents_end = pointer + # Unfortunately we need to understand the contents of the data to + # properly scan forward, which bleeds some representation info into + # the parser. This condition handles the unused bits byte in + # constructed bit strings. + if tag == 3: + contents_end += 1 + while contents_end < data_len: + sub_header_end, contents_end = _parse(encoded_data, data_len, contents_end, lengths_only=True) + if contents_end == sub_header_end and encoded_data[contents_end - 2:contents_end] == b'\x00\x00': + break + if lengths_only: + return (pointer, contents_end) + if contents_end > data_len: + raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end, data_len)) + return ( + ( + first_octet >> 6, + (first_octet >> 5) & 1, + tag, + encoded_data[start:pointer], + encoded_data[pointer:contents_end - 2], + b'\x00\x00' + ), + contents_end + ) + + if contents_end > data_len: + raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end, data_len)) + return ( + ( + first_octet >> 6, + (first_octet >> 5) & 1, + tag, + encoded_data[start:pointer], + encoded_data[pointer:contents_end], + b'' + ), + contents_end + ) + + +def _dump_header(class_, method, tag, contents): + """ + Constructs the header bytes for an ASN.1 object + + :param class_: + An integer ASN.1 class value: 0 (universal), 1 (application), + 2 (context), 3 (private) + + :param method: + An integer ASN.1 method value: 0 (primitive), 1 (constructed) + + :param tag: + An integer ASN.1 tag value + + :param contents: + A byte string of the encoded byte contents + + :return: + A byte string of the ASN.1 DER header + """ + + header = b'' + + id_num = 0 + id_num |= class_ << 6 + id_num |= method << 5 + + if tag >= 31: + header += chr_cls(id_num | 31) + while tag > 0: + continuation_bit = 0x80 if tag > 0x7F else 0 + header += chr_cls(continuation_bit | (tag & 0x7F)) + tag = tag >> 7 + else: + header += chr_cls(id_num | tag) + + length = len(contents) + if length <= 127: + header += chr_cls(length) + else: + length_bytes = int_to_bytes(length) + header += chr_cls(0x80 | len(length_bytes)) + header += length_bytes + + return header diff --git a/venv/lib/python2.7/site-packages/asn1crypto/pdf.py b/venv/lib/python2.7/site-packages/asn1crypto/pdf.py new file mode 100644 index 0000000..b72c886 --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/pdf.py @@ -0,0 +1,84 @@ +# coding: utf-8 + +""" +ASN.1 type classes for PDF signature structures. Adds extra oid mapping and +value parsing to asn1crypto.x509.Extension() and asn1crypto.xms.CMSAttribute(). 
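+
+Importing this module is enough to perform the registration: the OID maps
+and value specs on those classes are updated as a side effect at import time.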
+""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +from .cms import CMSAttributeType, CMSAttribute +from .core import ( + Boolean, + Integer, + Null, + ObjectIdentifier, + OctetString, + Sequence, + SequenceOf, + SetOf, +) +from .crl import CertificateList +from .ocsp import OCSPResponse +from .x509 import ( + Extension, + ExtensionId, + GeneralName, + KeyPurposeId, +) + + +class AdobeArchiveRevInfo(Sequence): + _fields = [ + ('version', Integer) + ] + + +class AdobeTimestamp(Sequence): + _fields = [ + ('version', Integer), + ('location', GeneralName), + ('requires_auth', Boolean, {'optional': True, 'default': False}), + ] + + +class OtherRevInfo(Sequence): + _fields = [ + ('type', ObjectIdentifier), + ('value', OctetString), + ] + + +class SequenceOfCertificateList(SequenceOf): + _child_spec = CertificateList + + +class SequenceOfOCSPResponse(SequenceOf): + _child_spec = OCSPResponse + + +class SequenceOfOtherRevInfo(SequenceOf): + _child_spec = OtherRevInfo + + +class RevocationInfoArchival(Sequence): + _fields = [ + ('crl', SequenceOfCertificateList, {'explicit': 0, 'optional': True}), + ('ocsp', SequenceOfOCSPResponse, {'explicit': 1, 'optional': True}), + ('other_rev_info', SequenceOfOtherRevInfo, {'explicit': 2, 'optional': True}), + ] + + +class SetOfRevocationInfoArchival(SetOf): + _child_spec = RevocationInfoArchival + + +ExtensionId._map['1.2.840.113583.1.1.9.2'] = 'adobe_archive_rev_info' +ExtensionId._map['1.2.840.113583.1.1.9.1'] = 'adobe_timestamp' +ExtensionId._map['1.2.840.113583.1.1.10'] = 'adobe_ppklite_credential' +Extension._oid_specs['adobe_archive_rev_info'] = AdobeArchiveRevInfo +Extension._oid_specs['adobe_timestamp'] = AdobeTimestamp +Extension._oid_specs['adobe_ppklite_credential'] = Null +KeyPurposeId._map['1.2.840.113583.1.1.5'] = 'pdf_signing' +CMSAttributeType._map['1.2.840.113583.1.1.8'] = 'adobe_revocation_info_archival' +CMSAttribute._oid_specs['adobe_revocation_info_archival'] = SetOfRevocationInfoArchival diff --git a/venv/lib/python2.7/site-packages/asn1crypto/pem.py b/venv/lib/python2.7/site-packages/asn1crypto/pem.py new file mode 100644 index 0000000..511ea4b --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/pem.py @@ -0,0 +1,222 @@ +# coding: utf-8 + +""" +Encoding DER to PEM and decoding PEM to DER. Exports the following items: + + - armor() + - detect() + - unarmor() + +""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +import base64 +import re +import sys + +from ._errors import unwrap +from ._types import type_name as _type_name, str_cls, byte_cls + +if sys.version_info < (3,): + from cStringIO import StringIO as BytesIO +else: + from io import BytesIO + + +def detect(byte_string): + """ + Detect if a byte string seems to contain a PEM-encoded block + + :param byte_string: + A byte string to look through + + :return: + A boolean, indicating if a PEM-encoded block is contained in the byte + string + """ + + if not isinstance(byte_string, byte_cls): + raise TypeError(unwrap( + ''' + byte_string must be a byte string, not %s + ''', + _type_name(byte_string) + )) + + return byte_string.find(b'-----BEGIN') != -1 or byte_string.find(b'---- BEGIN') != -1 + + +def armor(type_name, der_bytes, headers=None): + """ + Armors a DER-encoded byte string in PEM + + :param type_name: + A unicode string that will be capitalized and placed in the header + and footer of the block. E.g. "CERTIFICATE", "PRIVATE KEY", etc. 
This + will appear as "-----BEGIN CERTIFICATE-----" and + "-----END CERTIFICATE-----". + + :param der_bytes: + A byte string to be armored + + :param headers: + An OrderedDict of the header lines to write after the BEGIN line + + :return: + A byte string of the PEM block + """ + + if not isinstance(der_bytes, byte_cls): + raise TypeError(unwrap( + ''' + der_bytes must be a byte string, not %s + ''' % _type_name(der_bytes) + )) + + if not isinstance(type_name, str_cls): + raise TypeError(unwrap( + ''' + type_name must be a unicode string, not %s + ''', + _type_name(type_name) + )) + + type_name = type_name.upper().encode('ascii') + + output = BytesIO() + output.write(b'-----BEGIN ') + output.write(type_name) + output.write(b'-----\n') + if headers: + for key in headers: + output.write(key.encode('ascii')) + output.write(b': ') + output.write(headers[key].encode('ascii')) + output.write(b'\n') + output.write(b'\n') + b64_bytes = base64.b64encode(der_bytes) + b64_len = len(b64_bytes) + i = 0 + while i < b64_len: + output.write(b64_bytes[i:i + 64]) + output.write(b'\n') + i += 64 + output.write(b'-----END ') + output.write(type_name) + output.write(b'-----\n') + + return output.getvalue() + + +def _unarmor(pem_bytes): + """ + Convert a PEM-encoded byte string into one or more DER-encoded byte strings + + :param pem_bytes: + A byte string of the PEM-encoded data + + :raises: + ValueError - when the pem_bytes do not appear to be PEM-encoded bytes + + :return: + A generator of 3-element tuples in the format: (object_type, headers, + der_bytes). The object_type is a unicode string of what is between + "-----BEGIN " and "-----". Examples include: "CERTIFICATE", + "PUBLIC KEY", "PRIVATE KEY". The headers is a dict containing any lines + in the form "Name: Value" that are right after the begin line. 
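+
+    A minimal usage sketch (pem_data is a hypothetical byte string of one or
+    more PEM blocks):
+
+        for object_type, headers, der_bytes in _unarmor(pem_data):
+            print(object_type, len(der_bytes))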
+ """ + + if not isinstance(pem_bytes, byte_cls): + raise TypeError(unwrap( + ''' + pem_bytes must be a byte string, not %s + ''', + _type_name(pem_bytes) + )) + + # Valid states include: "trash", "headers", "body" + state = 'trash' + headers = {} + base64_data = b'' + object_type = None + + found_start = False + found_end = False + + for line in pem_bytes.splitlines(False): + if line == b'': + continue + + if state == "trash": + # Look for a starting line since some CA cert bundle show the cert + # into in a parsed format above each PEM block + type_name_match = re.match(b'^(?:---- |-----)BEGIN ([A-Z0-9 ]+)(?: ----|-----)', line) + if not type_name_match: + continue + object_type = type_name_match.group(1).decode('ascii') + + found_start = True + state = 'headers' + continue + + if state == 'headers': + if line.find(b':') == -1: + state = 'body' + else: + decoded_line = line.decode('ascii') + name, value = decoded_line.split(':', 1) + headers[name] = value.strip() + continue + + if state == 'body': + if line[0:5] in (b'-----', b'---- '): + der_bytes = base64.b64decode(base64_data) + + yield (object_type, headers, der_bytes) + + state = 'trash' + headers = {} + base64_data = b'' + object_type = None + found_end = True + continue + + base64_data += line + + if not found_start or not found_end: + raise ValueError(unwrap( + ''' + pem_bytes does not appear to contain PEM-encoded data - no + BEGIN/END combination found + ''' + )) + + +def unarmor(pem_bytes, multiple=False): + """ + Convert a PEM-encoded byte string into a DER-encoded byte string + + :param pem_bytes: + A byte string of the PEM-encoded data + + :param multiple: + If True, function will return a generator + + :raises: + ValueError - when the pem_bytes do not appear to be PEM-encoded bytes + + :return: + A 3-element tuple (object_name, headers, der_bytes). The object_name is + a unicode string of what is between "-----BEGIN " and "-----". Examples + include: "CERTIFICATE", "PUBLIC KEY", "PRIVATE KEY". The headers is a + dict containing any lines in the form "Name: Value" that are right + after the begin line. + """ + + generator = _unarmor(pem_bytes) + + if not multiple: + return next(generator) + + return generator diff --git a/venv/lib/python2.7/site-packages/asn1crypto/pkcs12.py b/venv/lib/python2.7/site-packages/asn1crypto/pkcs12.py new file mode 100644 index 0000000..7ebcefe --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/pkcs12.py @@ -0,0 +1,193 @@ +# coding: utf-8 + +""" +ASN.1 type classes for PKCS#12 files. Exports the following items: + + - CertBag() + - CrlBag() + - Pfx() + - SafeBag() + - SecretBag() + +Other type classes are defined that help compose the types listed above. 
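+
+A minimal usage sketch (the .p12 file name and contents are assumed):
+
+    with open('store.p12', 'rb') as f:
+        pfx = Pfx.load(f.read())
+    for content_info in pfx.authenticated_safe:
+        print(content_info['content_type'].native)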
+""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +from .algos import DigestInfo +from .cms import ContentInfo, SignedData +from .core import ( + Any, + BMPString, + Integer, + ObjectIdentifier, + OctetString, + ParsableOctetString, + Sequence, + SequenceOf, + SetOf, +) +from .keys import PrivateKeyInfo, EncryptedPrivateKeyInfo +from .x509 import Certificate, KeyPurposeId + + +# The structures in this file are taken from https://tools.ietf.org/html/rfc7292 + +class MacData(Sequence): + _fields = [ + ('mac', DigestInfo), + ('mac_salt', OctetString), + ('iterations', Integer, {'default': 1}), + ] + + +class Version(Integer): + _map = { + 3: 'v3' + } + + +class AttributeType(ObjectIdentifier): + _map = { + # https://tools.ietf.org/html/rfc2985#page-18 + '1.2.840.113549.1.9.20': 'friendly_name', + '1.2.840.113549.1.9.21': 'local_key_id', + # https://support.microsoft.com/en-us/kb/287547 + '1.3.6.1.4.1.311.17.1': 'microsoft_local_machine_keyset', + # https://github.com/frohoff/jdk8u-dev-jdk/blob/master/src/share/classes/sun/security/pkcs12/PKCS12KeyStore.java + # this is a set of OIDs, representing key usage, the usual value is a SET of one element OID 2.5.29.37.0 + '2.16.840.1.113894.746875.1.1': 'trusted_key_usage', + } + + +class SetOfAny(SetOf): + _child_spec = Any + + +class SetOfBMPString(SetOf): + _child_spec = BMPString + + +class SetOfOctetString(SetOf): + _child_spec = OctetString + + +class SetOfKeyPurposeId(SetOf): + _child_spec = KeyPurposeId + + +class Attribute(Sequence): + _fields = [ + ('type', AttributeType), + ('values', None), + ] + + _oid_specs = { + 'friendly_name': SetOfBMPString, + 'local_key_id': SetOfOctetString, + 'microsoft_csp_name': SetOfBMPString, + 'trusted_key_usage': SetOfKeyPurposeId, + } + + def _values_spec(self): + return self._oid_specs.get(self['type'].native, SetOfAny) + + _spec_callbacks = { + 'values': _values_spec + } + + +class Attributes(SetOf): + _child_spec = Attribute + + +class Pfx(Sequence): + _fields = [ + ('version', Version), + ('auth_safe', ContentInfo), + ('mac_data', MacData, {'optional': True}) + ] + + _authenticated_safe = None + + @property + def authenticated_safe(self): + if self._authenticated_safe is None: + content = self['auth_safe']['content'] + if isinstance(content, SignedData): + content = content['content_info']['content'] + self._authenticated_safe = AuthenticatedSafe.load(content.native) + return self._authenticated_safe + + +class AuthenticatedSafe(SequenceOf): + _child_spec = ContentInfo + + +class BagId(ObjectIdentifier): + _map = { + '1.2.840.113549.1.12.10.1.1': 'key_bag', + '1.2.840.113549.1.12.10.1.2': 'pkcs8_shrouded_key_bag', + '1.2.840.113549.1.12.10.1.3': 'cert_bag', + '1.2.840.113549.1.12.10.1.4': 'crl_bag', + '1.2.840.113549.1.12.10.1.5': 'secret_bag', + '1.2.840.113549.1.12.10.1.6': 'safe_contents', + } + + +class CertId(ObjectIdentifier): + _map = { + '1.2.840.113549.1.9.22.1': 'x509', + '1.2.840.113549.1.9.22.2': 'sdsi', + } + + +class CertBag(Sequence): + _fields = [ + ('cert_id', CertId), + ('cert_value', ParsableOctetString, {'explicit': 0}), + ] + + _oid_pair = ('cert_id', 'cert_value') + _oid_specs = { + 'x509': Certificate, + } + + +class CrlBag(Sequence): + _fields = [ + ('crl_id', ObjectIdentifier), + ('crl_value', OctetString, {'explicit': 0}), + ] + + +class SecretBag(Sequence): + _fields = [ + ('secret_type_id', ObjectIdentifier), + ('secret_value', OctetString, {'explicit': 0}), + ] + + +class SafeContents(SequenceOf): + pass + + +class 
SafeBag(Sequence): + _fields = [ + ('bag_id', BagId), + ('bag_value', Any, {'explicit': 0}), + ('bag_attributes', Attributes, {'optional': True}), + ] + + _oid_pair = ('bag_id', 'bag_value') + _oid_specs = { + 'key_bag': PrivateKeyInfo, + 'pkcs8_shrouded_key_bag': EncryptedPrivateKeyInfo, + 'cert_bag': CertBag, + 'crl_bag': CrlBag, + 'secret_bag': SecretBag, + 'safe_contents': SafeContents + } + + +SafeContents._child_spec = SafeBag diff --git a/venv/lib/python2.7/site-packages/asn1crypto/tsp.py b/venv/lib/python2.7/site-packages/asn1crypto/tsp.py new file mode 100644 index 0000000..bd40810 --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/tsp.py @@ -0,0 +1,310 @@ +# coding: utf-8 + +""" +ASN.1 type classes for the time stamp protocol (TSP). Exports the following +items: + + - TimeStampReq() + - TimeStampResp() + +Also adds TimeStampedData() support to asn1crypto.cms.ContentInfo(), +TimeStampedData() and TSTInfo() support to +asn1crypto.cms.EncapsulatedContentInfo() and some oids and value parsers to +asn1crypto.cms.CMSAttribute(). + +Other type classes are defined that help compose the types listed above. +""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +from .algos import DigestAlgorithm +from .cms import ( + CMSAttribute, + CMSAttributeType, + ContentInfo, + ContentType, + EncapsulatedContentInfo, +) +from .core import ( + Any, + BitString, + Boolean, + Choice, + GeneralizedTime, + IA5String, + Integer, + ObjectIdentifier, + OctetString, + Sequence, + SequenceOf, + SetOf, + UTF8String, +) +from .crl import CertificateList +from .x509 import ( + Attributes, + CertificatePolicies, + GeneralName, + GeneralNames, +) + + +# The structures in this file are based on https://tools.ietf.org/html/rfc3161, +# https://tools.ietf.org/html/rfc4998, https://tools.ietf.org/html/rfc5544, +# https://tools.ietf.org/html/rfc5035, https://tools.ietf.org/html/rfc2634 + +class Version(Integer): + _map = { + 0: 'v0', + 1: 'v1', + 2: 'v2', + 3: 'v3', + 4: 'v4', + 5: 'v5', + } + + +class MessageImprint(Sequence): + _fields = [ + ('hash_algorithm', DigestAlgorithm), + ('hashed_message', OctetString), + ] + + +class Accuracy(Sequence): + _fields = [ + ('seconds', Integer, {'optional': True}), + ('millis', Integer, {'implicit': 0, 'optional': True}), + ('micros', Integer, {'implicit': 1, 'optional': True}), + ] + + +class Extension(Sequence): + _fields = [ + ('extn_id', ObjectIdentifier), + ('critical', Boolean, {'default': False}), + ('extn_value', OctetString), + ] + + +class Extensions(SequenceOf): + _child_spec = Extension + + +class TSTInfo(Sequence): + _fields = [ + ('version', Version), + ('policy', ObjectIdentifier), + ('message_imprint', MessageImprint), + ('serial_number', Integer), + ('gen_time', GeneralizedTime), + ('accuracy', Accuracy, {'optional': True}), + ('ordering', Boolean, {'default': False}), + ('nonce', Integer, {'optional': True}), + ('tsa', GeneralName, {'explicit': 0, 'optional': True}), + ('extensions', Extensions, {'implicit': 1, 'optional': True}), + ] + + +class TimeStampReq(Sequence): + _fields = [ + ('version', Version), + ('message_imprint', MessageImprint), + ('req_policy', ObjectIdentifier, {'optional': True}), + ('nonce', Integer, {'optional': True}), + ('cert_req', Boolean, {'default': False}), + ('extensions', Extensions, {'implicit': 0, 'optional': True}), + ] + + +class PKIStatus(Integer): + _map = { + 0: 'granted', + 1: 'granted_with_mods', + 2: 'rejection', + 3: 'waiting', + 4: 'revocation_warning', + 5: 
'revocation_notification', + } + + +class PKIFreeText(SequenceOf): + _child_spec = UTF8String + + +class PKIFailureInfo(BitString): + _map = { + 0: 'bad_alg', + 2: 'bad_request', + 5: 'bad_data_format', + 14: 'time_not_available', + 15: 'unaccepted_policy', + 16: 'unaccepted_extensions', + 17: 'add_info_not_available', + 25: 'system_failure', + } + + +class PKIStatusInfo(Sequence): + _fields = [ + ('status', PKIStatus), + ('status_string', PKIFreeText, {'optional': True}), + ('fail_info', PKIFailureInfo, {'optional': True}), + ] + + +class TimeStampResp(Sequence): + _fields = [ + ('status', PKIStatusInfo), + ('time_stamp_token', ContentInfo), + ] + + +class MetaData(Sequence): + _fields = [ + ('hash_protected', Boolean), + ('file_name', UTF8String, {'optional': True}), + ('media_type', IA5String, {'optional': True}), + ('other_meta_data', Attributes, {'optional': True}), + ] + + +class TimeStampAndCRL(SequenceOf): + _fields = [ + ('time_stamp', EncapsulatedContentInfo), + ('crl', CertificateList, {'optional': True}), + ] + + +class TimeStampTokenEvidence(SequenceOf): + _child_spec = TimeStampAndCRL + + +class DigestAlgorithms(SequenceOf): + _child_spec = DigestAlgorithm + + +class EncryptionInfo(Sequence): + _fields = [ + ('encryption_info_type', ObjectIdentifier), + ('encryption_info_value', Any), + ] + + +class PartialHashtree(SequenceOf): + _child_spec = OctetString + + +class PartialHashtrees(SequenceOf): + _child_spec = PartialHashtree + + +class ArchiveTimeStamp(Sequence): + _fields = [ + ('digest_algorithm', DigestAlgorithm, {'implicit': 0, 'optional': True}), + ('attributes', Attributes, {'implicit': 1, 'optional': True}), + ('reduced_hashtree', PartialHashtrees, {'implicit': 2, 'optional': True}), + ('time_stamp', ContentInfo), + ] + + +class ArchiveTimeStampSequence(SequenceOf): + _child_spec = ArchiveTimeStamp + + +class EvidenceRecord(Sequence): + _fields = [ + ('version', Version), + ('digest_algorithms', DigestAlgorithms), + ('crypto_infos', Attributes, {'implicit': 0, 'optional': True}), + ('encryption_info', EncryptionInfo, {'implicit': 1, 'optional': True}), + ('archive_time_stamp_sequence', ArchiveTimeStampSequence), + ] + + +class OtherEvidence(Sequence): + _fields = [ + ('oe_type', ObjectIdentifier), + ('oe_value', Any), + ] + + +class Evidence(Choice): + _alternatives = [ + ('tst_evidence', TimeStampTokenEvidence, {'implicit': 0}), + ('ers_evidence', EvidenceRecord, {'implicit': 1}), + ('other_evidence', OtherEvidence, {'implicit': 2}), + ] + + +class TimeStampedData(Sequence): + _fields = [ + ('version', Version), + ('data_uri', IA5String, {'optional': True}), + ('meta_data', MetaData, {'optional': True}), + ('content', OctetString, {'optional': True}), + ('temporal_evidence', Evidence), + ] + + +class IssuerSerial(Sequence): + _fields = [ + ('issuer', GeneralNames), + ('serial_number', Integer), + ] + + +class ESSCertID(Sequence): + _fields = [ + ('cert_hash', OctetString), + ('issuer_serial', IssuerSerial, {'optional': True}), + ] + + +class ESSCertIDs(SequenceOf): + _child_spec = ESSCertID + + +class SigningCertificate(Sequence): + _fields = [ + ('certs', ESSCertIDs), + ('policies', CertificatePolicies, {'optional': True}), + ] + + +class SetOfSigningCertificates(SetOf): + _child_spec = SigningCertificate + + +class ESSCertIDv2(Sequence): + _fields = [ + ('hash_algorithm', DigestAlgorithm, {'default': {'algorithm': 'sha256'}}), + ('cert_hash', OctetString), + ('issuer_serial', IssuerSerial, {'optional': True}), + ] + + +class ESSCertIDv2s(SequenceOf): + _child_spec 
= ESSCertIDv2 + + +class SigningCertificateV2(Sequence): + _fields = [ + ('certs', ESSCertIDv2s), + ('policies', CertificatePolicies, {'optional': True}), + ] + + +class SetOfSigningCertificatesV2(SetOf): + _child_spec = SigningCertificateV2 + + +EncapsulatedContentInfo._oid_specs['tst_info'] = TSTInfo +EncapsulatedContentInfo._oid_specs['timestamped_data'] = TimeStampedData +ContentInfo._oid_specs['timestamped_data'] = TimeStampedData +ContentType._map['1.2.840.113549.1.9.16.1.4'] = 'tst_info' +ContentType._map['1.2.840.113549.1.9.16.1.31'] = 'timestamped_data' +CMSAttributeType._map['1.2.840.113549.1.9.16.2.12'] = 'signing_certificate' +CMSAttribute._oid_specs['signing_certificate'] = SetOfSigningCertificates +CMSAttributeType._map['1.2.840.113549.1.9.16.2.47'] = 'signing_certificate_v2' +CMSAttribute._oid_specs['signing_certificate_v2'] = SetOfSigningCertificatesV2 diff --git a/venv/lib/python2.7/site-packages/asn1crypto/util.py b/venv/lib/python2.7/site-packages/asn1crypto/util.py new file mode 100644 index 0000000..2e55ef8 --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/util.py @@ -0,0 +1,712 @@ +# coding: utf-8 + +""" +Miscellaneous data helpers, including functions for converting integers to and +from bytes and UTC timezone. Exports the following items: + + - OrderedDict() + - int_from_bytes() + - int_to_bytes() + - timezone.utc + - inet_ntop() + - inet_pton() + - uri_to_iri() + - iri_to_uri() +""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +import math +import sys +from datetime import datetime, date, time + +from ._errors import unwrap +from ._iri import iri_to_uri, uri_to_iri # noqa +from ._ordereddict import OrderedDict # noqa +from ._types import type_name + +if sys.platform == 'win32': + from ._inet import inet_ntop, inet_pton +else: + from socket import inet_ntop, inet_pton # noqa + + +# Python 2 +if sys.version_info <= (3,): + + from datetime import timedelta, tzinfo + + py2 = True + + def int_to_bytes(value, signed=False, width=None): + """ + Converts an integer to a byte string + + :param value: + The integer to convert + + :param signed: + If the byte string should be encoded using two's complement + + :param width: + None == auto, otherwise an integer of the byte width for the return + value + + :return: + A byte string + """ + + # Handle negatives in two's complement + is_neg = False + if signed and value < 0: + is_neg = True + bits = int(math.ceil(len('%x' % abs(value)) / 2.0) * 8) + value = (value + (1 << bits)) % (1 << bits) + + hex_str = '%x' % value + if len(hex_str) & 1: + hex_str = '0' + hex_str + + output = hex_str.decode('hex') + + if signed and not is_neg and ord(output[0:1]) & 0x80: + output = b'\x00' + output + + if width is not None: + if is_neg: + pad_char = b'\xFF' + else: + pad_char = b'\x00' + output = (pad_char * (width - len(output))) + output + elif is_neg and ord(output[0:1]) & 0x80 == 0: + output = b'\xFF' + output + + return output + + def int_from_bytes(value, signed=False): + """ + Converts a byte string to an integer + + :param value: + The byte string to convert + + :param signed: + If the byte string should be interpreted using two's complement + + :return: + An integer + """ + + if value == b'': + return 0 + + num = long(value.encode("hex"), 16) # noqa + + if not signed: + return num + + # Check for sign bit and handle two's complement + if ord(value[0:1]) & 0x80: + bit_len = len(value) * 8 + return num - (1 << bit_len) + + return num + + class utc(tzinfo): # noqa + + def 
tzname(self, _): + return b'UTC+00:00' + + def utcoffset(self, _): + return timedelta(0) + + def dst(self, _): + return timedelta(0) + + class timezone(): # noqa + + utc = utc() + + +# Python 3 +else: + + from datetime import timezone # noqa + + py2 = False + + def int_to_bytes(value, signed=False, width=None): + """ + Converts an integer to a byte string + + :param value: + The integer to convert + + :param signed: + If the byte string should be encoded using two's complement + + :param width: + None == auto, otherwise an integer of the byte width for the return + value + + :return: + A byte string + """ + + if width is None: + if signed: + if value < 0: + bits_required = abs(value + 1).bit_length() + else: + bits_required = value.bit_length() + if bits_required % 8 == 0: + bits_required += 1 + else: + bits_required = value.bit_length() + width = math.ceil(bits_required / 8) or 1 + return value.to_bytes(width, byteorder='big', signed=signed) + + def int_from_bytes(value, signed=False): + """ + Converts a byte string to an integer + + :param value: + The byte string to convert + + :param signed: + If the byte string should be interpreted using two's complement + + :return: + An integer + """ + + return int.from_bytes(value, 'big', signed=signed) + + +_DAYS_PER_MONTH_YEAR_0 = { + 1: 31, + 2: 29, # Year 0 was a leap year + 3: 31, + 4: 30, + 5: 31, + 6: 30, + 7: 31, + 8: 31, + 9: 30, + 10: 31, + 11: 30, + 12: 31 +} + + +class extended_date(object): + """ + A datetime.date-like object that can represent the year 0. This is just + to handle 0000-01-01 found in some certificates. + """ + + year = None + month = None + day = None + + def __init__(self, year, month, day): + """ + :param year: + The integer 0 + + :param month: + An integer from 1 to 12 + + :param day: + An integer from 1 to 31 + """ + + if year != 0: + raise ValueError('year must be 0') + + if month < 1 or month > 12: + raise ValueError('month is out of range') + + if day < 0 or day > _DAYS_PER_MONTH_YEAR_0[month]: + raise ValueError('day is out of range') + + self.year = year + self.month = month + self.day = day + + def _format(self, format): + """ + Performs strftime(), always returning a unicode string + + :param format: + A strftime() format string + + :return: + A unicode string of the formatted date + """ + + format = format.replace('%Y', '0000') + # Year 0 is 1BC and a leap year. Leap years repeat themselves + # every 28 years. Because of adjustments and the proleptic gregorian + # calendar, the simplest way to format is to substitute year 2000. 
+ temp = date(2000, self.month, self.day) + if '%c' in format: + c_out = temp.strftime('%c') + # Handle full years + c_out = c_out.replace('2000', '0000') + c_out = c_out.replace('%', '%%') + format = format.replace('%c', c_out) + if '%x' in format: + x_out = temp.strftime('%x') + # Handle formats such as 08/16/2000 or 16.08.2000 + x_out = x_out.replace('2000', '0000') + x_out = x_out.replace('%', '%%') + format = format.replace('%x', x_out) + return temp.strftime(format) + + def isoformat(self): + """ + Formats the date as %Y-%m-%d + + :return: + The date formatted to %Y-%m-%d as a unicode string in Python 3 + and a byte string in Python 2 + """ + + return self.strftime('0000-%m-%d') + + def strftime(self, format): + """ + Formats the date using strftime() + + :param format: + The strftime() format string + + :return: + The formatted date as a unicode string in Python 3 and a byte + string in Python 2 + """ + + output = self._format(format) + if py2: + return output.encode('utf-8') + return output + + def replace(self, year=None, month=None, day=None): + """ + Returns a new datetime.date or asn1crypto.util.extended_date + object with the specified components replaced + + :return: + A datetime.date or asn1crypto.util.extended_date object + """ + + if year is None: + year = self.year + if month is None: + month = self.month + if day is None: + day = self.day + + if year > 0: + cls = date + else: + cls = extended_date + + return cls( + year, + month, + day + ) + + def __str__(self): + if py2: + return self.__bytes__() + else: + return self.__unicode__() + + def __bytes__(self): + return self.__unicode__().encode('utf-8') + + def __unicode__(self): + return self._format('%Y-%m-%d') + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return self.__cmp__(other) == 0 + + def __ne__(self, other): + return not self.__eq__(other) + + def _comparison_error(self, other): + raise TypeError(unwrap( + ''' + An asn1crypto.util.extended_date object can only be compared to + an asn1crypto.util.extended_date or datetime.date object, not %s + ''', + type_name(other) + )) + + def __cmp__(self, other): + if isinstance(other, date): + return -1 + + if not isinstance(other, self.__class__): + self._comparison_error(other) + + st = ( + self.year, + self.month, + self.day + ) + ot = ( + other.year, + other.month, + other.day + ) + + if st < ot: + return -1 + if st > ot: + return 1 + return 0 + + def __lt__(self, other): + return self.__cmp__(other) < 0 + + def __le__(self, other): + return self.__cmp__(other) <= 0 + + def __gt__(self, other): + return self.__cmp__(other) > 0 + + def __ge__(self, other): + return self.__cmp__(other) >= 0 + + +class extended_datetime(object): + """ + A datetime.datetime-like object that can represent the year 0. This is just + to handle 0000-01-01 found in some certificates. 
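+
+    A minimal sketch of the intended behaviour:
+
+        dt = extended_datetime(0, 1, 1)
+        dt.isoformat()  # '0000-01-01T00:00:00'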
+ """ + + year = None + month = None + day = None + hour = None + minute = None + second = None + microsecond = None + tzinfo = None + + def __init__(self, year, month, day, hour=0, minute=0, second=0, microsecond=0, tzinfo=None): + """ + :param year: + The integer 0 + + :param month: + An integer from 1 to 12 + + :param day: + An integer from 1 to 31 + + :param hour: + An integer from 0 to 23 + + :param minute: + An integer from 0 to 59 + + :param second: + An integer from 0 to 59 + + :param microsecond: + An integer from 0 to 999999 + """ + + if year != 0: + raise ValueError('year must be 0') + + if month < 1 or month > 12: + raise ValueError('month is out of range') + + if day < 0 or day > _DAYS_PER_MONTH_YEAR_0[month]: + raise ValueError('day is out of range') + + if hour < 0 or hour > 23: + raise ValueError('hour is out of range') + + if minute < 0 or minute > 59: + raise ValueError('minute is out of range') + + if second < 0 or second > 59: + raise ValueError('second is out of range') + + if microsecond < 0 or microsecond > 999999: + raise ValueError('microsecond is out of range') + + self.year = year + self.month = month + self.day = day + self.hour = hour + self.minute = minute + self.second = second + self.microsecond = microsecond + self.tzinfo = tzinfo + + def date(self): + """ + :return: + An asn1crypto.util.extended_date of the date + """ + + return extended_date(self.year, self.month, self.day) + + def time(self): + """ + :return: + A datetime.time object of the time + """ + + return time(self.hour, self.minute, self.second, self.microsecond, self.tzinfo) + + def utcoffset(self): + """ + :return: + None or a datetime.timedelta() of the offset from UTC + """ + + if self.tzinfo is None: + return None + return self.tzinfo.utcoffset(self.replace(year=2000)) + + def dst(self): + """ + :return: + None or a datetime.timedelta() of the daylight savings time offset + """ + + if self.tzinfo is None: + return None + return self.tzinfo.dst(self.replace(year=2000)) + + def tzname(self): + """ + :return: + None or the name of the timezone as a unicode string in Python 3 + and a byte string in Python 2 + """ + + if self.tzinfo is None: + return None + return self.tzinfo.tzname(self.replace(year=2000)) + + def _format(self, format): + """ + Performs strftime(), always returning a unicode string + + :param format: + A strftime() format string + + :return: + A unicode string of the formatted datetime + """ + + format = format.replace('%Y', '0000') + # Year 0 is 1BC and a leap year. Leap years repeat themselves + # every 28 years. Because of adjustments and the proleptic gregorian + # calendar, the simplest way to format is to substitute year 2000. 
+ temp = datetime( + 2000, + self.month, + self.day, + self.hour, + self.minute, + self.second, + self.microsecond, + self.tzinfo + ) + if '%c' in format: + c_out = temp.strftime('%c') + # Handle full years + c_out = c_out.replace('2000', '0000') + c_out = c_out.replace('%', '%%') + format = format.replace('%c', c_out) + if '%x' in format: + x_out = temp.strftime('%x') + # Handle formats such as 08/16/2000 or 16.08.2000 + x_out = x_out.replace('2000', '0000') + x_out = x_out.replace('%', '%%') + format = format.replace('%x', x_out) + return temp.strftime(format) + + def isoformat(self, sep='T'): + """ + Formats the date as "%Y-%m-%d %H:%M:%S" with the sep param between the + date and time portions + + :param set: + A single character of the separator to place between the date and + time + + :return: + The formatted datetime as a unicode string in Python 3 and a byte + string in Python 2 + """ + + if self.microsecond == 0: + return self.strftime('0000-%%m-%%d%s%%H:%%M:%%S' % sep) + return self.strftime('0000-%%m-%%d%s%%H:%%M:%%S.%%f' % sep) + + def strftime(self, format): + """ + Formats the date using strftime() + + :param format: + The strftime() format string + + :return: + The formatted date as a unicode string in Python 3 and a byte + string in Python 2 + """ + + output = self._format(format) + if py2: + return output.encode('utf-8') + return output + + def replace(self, year=None, month=None, day=None, hour=None, minute=None, + second=None, microsecond=None, tzinfo=None): + """ + Returns a new datetime.datetime or asn1crypto.util.extended_datetime + object with the specified components replaced + + :return: + A datetime.datetime or asn1crypto.util.extended_datetime object + """ + + if year is None: + year = self.year + if month is None: + month = self.month + if day is None: + day = self.day + if hour is None: + hour = self.hour + if minute is None: + minute = self.minute + if second is None: + second = self.second + if microsecond is None: + microsecond = self.microsecond + if tzinfo is None: + tzinfo = self.tzinfo + + if year > 0: + cls = datetime + else: + cls = extended_datetime + + return cls( + year, + month, + day, + hour, + minute, + second, + microsecond, + tzinfo + ) + + def __str__(self): + if py2: + return self.__bytes__() + else: + return self.__unicode__() + + def __bytes__(self): + return self.__unicode__().encode('utf-8') + + def __unicode__(self): + format = '%Y-%m-%d %H:%M:%S' + if self.microsecond != 0: + format += '.%f' + return self._format(format) + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return self.__cmp__(other) == 0 + + def __ne__(self, other): + return not self.__eq__(other) + + def _comparison_error(self, other): + """ + Raises a TypeError about the other object not being suitable for + comparison + + :param other: + The object being compared to + """ + + raise TypeError(unwrap( + ''' + An asn1crypto.util.extended_datetime object can only be compared to + an asn1crypto.util.extended_datetime or datetime.datetime object, + not %s + ''', + type_name(other) + )) + + def __cmp__(self, other): + so = self.utcoffset() + oo = other.utcoffset() + + if (so is not None and oo is None) or (so is None and oo is not None): + raise TypeError("can't compare offset-naive and offset-aware datetimes") + + if isinstance(other, datetime): + return -1 + + if not isinstance(other, self.__class__): + self._comparison_error(other) + + st = ( + self.year, + self.month, + self.day, + self.hour, + self.minute, + self.second, + 
self.microsecond, + so + ) + ot = ( + other.year, + other.month, + other.day, + other.hour, + other.minute, + other.second, + other.microsecond, + oo + ) + + if st < ot: + return -1 + if st > ot: + return 1 + return 0 + + def __lt__(self, other): + return self.__cmp__(other) < 0 + + def __le__(self, other): + return self.__cmp__(other) <= 0 + + def __gt__(self, other): + return self.__cmp__(other) > 0 + + def __ge__(self, other): + return self.__cmp__(other) >= 0 diff --git a/venv/lib/python2.7/site-packages/asn1crypto/version.py b/venv/lib/python2.7/site-packages/asn1crypto/version.py new file mode 100644 index 0000000..2ce2408 --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/version.py @@ -0,0 +1,6 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + + +__version__ = '0.24.0' +__version_info__ = (0, 24, 0) diff --git a/venv/lib/python2.7/site-packages/asn1crypto/x509.py b/venv/lib/python2.7/site-packages/asn1crypto/x509.py new file mode 100644 index 0000000..5a572a3 --- /dev/null +++ b/venv/lib/python2.7/site-packages/asn1crypto/x509.py @@ -0,0 +1,3002 @@ +# coding: utf-8 + +""" +ASN.1 type classes for X.509 certificates. Exports the following items: + + - Attributes() + - Certificate() + - Extensions() + - GeneralName() + - GeneralNames() + - Name() + +Other type classes are defined that help compose the types listed above. +""" + +from __future__ import unicode_literals, division, absolute_import, print_function + +from contextlib import contextmanager +from encodings import idna # noqa +import hashlib +import re +import socket +import stringprep +import sys +import unicodedata + +from ._errors import unwrap +from ._iri import iri_to_uri, uri_to_iri +from ._ordereddict import OrderedDict +from ._types import type_name, str_cls, bytes_to_list +from .algos import AlgorithmIdentifier, AnyAlgorithmIdentifier, DigestAlgorithm, SignedDigestAlgorithm +from .core import ( + Any, + BitString, + BMPString, + Boolean, + Choice, + Concat, + Enumerated, + GeneralizedTime, + GeneralString, + IA5String, + Integer, + Null, + NumericString, + ObjectIdentifier, + OctetBitString, + OctetString, + ParsableOctetString, + PrintableString, + Sequence, + SequenceOf, + Set, + SetOf, + TeletexString, + UniversalString, + UTCTime, + UTF8String, + VisibleString, + VOID, +) +from .keys import PublicKeyInfo +from .util import int_to_bytes, int_from_bytes, inet_ntop, inet_pton + + +# The structures in this file are taken from https://tools.ietf.org/html/rfc5280 +# and a few other supplementary sources, mostly due to extra supported +# extension and name OIDs + + +class DNSName(IA5String): + + _encoding = 'idna' + _bad_tag = 19 + + def __ne__(self, other): + return not self == other + + def __eq__(self, other): + """ + Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.2 + + :param other: + Another DNSName object + + :return: + A boolean + """ + + if not isinstance(other, DNSName): + return False + + return self.__unicode__().lower() == other.__unicode__().lower() + + def set(self, value): + """ + Sets the value of the DNS name + + :param value: + A unicode string + """ + + if not isinstance(value, str_cls): + raise TypeError(unwrap( + ''' + %s value must be a unicode string, not %s + ''', + type_name(self), + type_name(value) + )) + + if value.startswith('.'): + encoded_value = b'.' 
+ value[1:].encode(self._encoding) + else: + encoded_value = value.encode(self._encoding) + + self._unicode = value + self.contents = encoded_value + self._header = None + if self._trailer != b'': + self._trailer = b'' + + +class URI(IA5String): + + def set(self, value): + """ + Sets the value of the string + + :param value: + A unicode string + """ + + if not isinstance(value, str_cls): + raise TypeError(unwrap( + ''' + %s value must be a unicode string, not %s + ''', + type_name(self), + type_name(value) + )) + + self._unicode = value + self.contents = iri_to_uri(value) + self._header = None + if self._trailer != b'': + self._trailer = b'' + + def __ne__(self, other): + return not self == other + + def __eq__(self, other): + """ + Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.4 + + :param other: + Another URI object + + :return: + A boolean + """ + + if not isinstance(other, URI): + return False + + return iri_to_uri(self.native) == iri_to_uri(other.native) + + def __unicode__(self): + """ + :return: + A unicode string + """ + + if self.contents is None: + return '' + if self._unicode is None: + self._unicode = uri_to_iri(self._merge_chunks()) + return self._unicode + + +class EmailAddress(IA5String): + + _contents = None + + # If the value has gone through the .set() method, thus normalizing it + _normalized = False + + @property + def contents(self): + """ + :return: + A byte string of the DER-encoded contents of the sequence + """ + + return self._contents + + @contents.setter + def contents(self, value): + """ + :param value: + A byte string of the DER-encoded contents of the sequence + """ + + self._normalized = False + self._contents = value + + def set(self, value): + """ + Sets the value of the string + + :param value: + A unicode string + """ + + if not isinstance(value, str_cls): + raise TypeError(unwrap( + ''' + %s value must be a unicode string, not %s + ''', + type_name(self), + type_name(value) + )) + + if value.find('@') != -1: + mailbox, hostname = value.rsplit('@', 1) + encoded_value = mailbox.encode('ascii') + b'@' + hostname.encode('idna') + else: + encoded_value = value.encode('ascii') + + self._normalized = True + self._unicode = value + self.contents = encoded_value + self._header = None + if self._trailer != b'': + self._trailer = b'' + + def __unicode__(self): + """ + :return: + A unicode string + """ + + if self._unicode is None: + contents = self._merge_chunks() + if contents.find(b'@') == -1: + self._unicode = contents.decode('ascii') + else: + mailbox, hostname = contents.rsplit(b'@', 1) + self._unicode = mailbox.decode('ascii') + '@' + hostname.decode('idna') + return self._unicode + + def __ne__(self, other): + return not self == other + + def __eq__(self, other): + """ + Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.5 + + :param other: + Another EmailAddress object + + :return: + A boolean + """ + + if not isinstance(other, EmailAddress): + return False + + if not self._normalized: + self.set(self.native) + if not other._normalized: + other.set(other.native) + + if self._contents.find(b'@') == -1 or other._contents.find(b'@') == -1: + return self._contents == other._contents + + other_mailbox, other_hostname = other._contents.rsplit(b'@', 1) + mailbox, hostname = self._contents.rsplit(b'@', 1) + + if mailbox != other_mailbox: + return False + + if hostname.lower() != other_hostname.lower(): + return False + + return True + + +class IPAddress(OctetString): + def parse(self, spec=None, spec_params=None): + """ + 
This method is not applicable to IP addresses + """ + + raise ValueError(unwrap( + ''' + IP address values can not be parsed + ''' + )) + + def set(self, value): + """ + Sets the value of the object + + :param value: + A unicode string containing an IPv4 address, IPv4 address with CIDR, + an IPv6 address or IPv6 address with CIDR + """ + + if not isinstance(value, str_cls): + raise TypeError(unwrap( + ''' + %s value must be a unicode string, not %s + ''', + type_name(self), + type_name(value) + )) + + original_value = value + + has_cidr = value.find('/') != -1 + cidr = 0 + if has_cidr: + parts = value.split('/', 1) + value = parts[0] + cidr = int(parts[1]) + if cidr < 0: + raise ValueError(unwrap( + ''' + %s value contains a CIDR range less than 0 + ''', + type_name(self) + )) + + if value.find(':') != -1: + family = socket.AF_INET6 + if cidr > 128: + raise ValueError(unwrap( + ''' + %s value contains a CIDR range bigger than 128, the maximum + value for an IPv6 address + ''', + type_name(self) + )) + cidr_size = 128 + else: + family = socket.AF_INET + if cidr > 32: + raise ValueError(unwrap( + ''' + %s value contains a CIDR range bigger than 32, the maximum + value for an IPv4 address + ''', + type_name(self) + )) + cidr_size = 32 + + cidr_bytes = b'' + if has_cidr: + cidr_mask = '1' * cidr + cidr_mask += '0' * (cidr_size - len(cidr_mask)) + cidr_bytes = int_to_bytes(int(cidr_mask, 2)) + cidr_bytes = (b'\x00' * ((cidr_size // 8) - len(cidr_bytes))) + cidr_bytes + + self._native = original_value + self.contents = inet_pton(family, value) + cidr_bytes + self._bytes = self.contents + self._header = None + if self._trailer != b'': + self._trailer = b'' + + @property + def native(self): + """ + The a native Python datatype representation of this value + + :return: + A unicode string or None + """ + + if self.contents is None: + return None + + if self._native is None: + byte_string = self.__bytes__() + byte_len = len(byte_string) + cidr_int = None + if byte_len in set([32, 16]): + value = inet_ntop(socket.AF_INET6, byte_string[0:16]) + if byte_len > 16: + cidr_int = int_from_bytes(byte_string[16:]) + elif byte_len in set([8, 4]): + value = inet_ntop(socket.AF_INET, byte_string[0:4]) + if byte_len > 4: + cidr_int = int_from_bytes(byte_string[4:]) + if cidr_int is not None: + cidr_bits = '{0:b}'.format(cidr_int) + cidr = len(cidr_bits.rstrip('0')) + value = value + '/' + str_cls(cidr) + self._native = value + return self._native + + def __ne__(self, other): + return not self == other + + def __eq__(self, other): + """ + :param other: + Another IPAddress object + + :return: + A boolean + """ + + if not isinstance(other, IPAddress): + return False + + return self.__bytes__() == other.__bytes__() + + +class Attribute(Sequence): + _fields = [ + ('type', ObjectIdentifier), + ('values', SetOf, {'spec': Any}), + ] + + +class Attributes(SequenceOf): + _child_spec = Attribute + + +class KeyUsage(BitString): + _map = { + 0: 'digital_signature', + 1: 'non_repudiation', + 2: 'key_encipherment', + 3: 'data_encipherment', + 4: 'key_agreement', + 5: 'key_cert_sign', + 6: 'crl_sign', + 7: 'encipher_only', + 8: 'decipher_only', + } + + +class PrivateKeyUsagePeriod(Sequence): + _fields = [ + ('not_before', GeneralizedTime, {'implicit': 0, 'optional': True}), + ('not_after', GeneralizedTime, {'implicit': 1, 'optional': True}), + ] + + +class NotReallyTeletexString(TeletexString): + """ + OpenSSL (and probably some other libraries) puts ISO-8859-1 + into TeletexString instead of ITU T.61. 
We use Windows-1252 when + decoding since it is a superset of ISO-8859-1, and less likely to + cause encoding issues, but we stay strict with encoding to prevent + us from creating bad data. + """ + + _decoding_encoding = 'cp1252' + + def __unicode__(self): + """ + :return: + A unicode string + """ + + if self.contents is None: + return '' + if self._unicode is None: + self._unicode = self._merge_chunks().decode(self._decoding_encoding) + return self._unicode + + +@contextmanager +def strict_teletex(): + try: + NotReallyTeletexString._decoding_encoding = 'teletex' + yield + finally: + NotReallyTeletexString._decoding_encoding = 'cp1252' + + +class DirectoryString(Choice): + _alternatives = [ + ('teletex_string', NotReallyTeletexString), + ('printable_string', PrintableString), + ('universal_string', UniversalString), + ('utf8_string', UTF8String), + ('bmp_string', BMPString), + # This is an invalid/bad alternative, but some broken certs use it + ('ia5_string', IA5String), + ] + + +class NameType(ObjectIdentifier): + _map = { + '2.5.4.3': 'common_name', + '2.5.4.4': 'surname', + '2.5.4.5': 'serial_number', + '2.5.4.6': 'country_name', + '2.5.4.7': 'locality_name', + '2.5.4.8': 'state_or_province_name', + '2.5.4.9': 'street_address', + '2.5.4.10': 'organization_name', + '2.5.4.11': 'organizational_unit_name', + '2.5.4.12': 'title', + '2.5.4.15': 'business_category', + '2.5.4.17': 'postal_code', + '2.5.4.20': 'telephone_number', + '2.5.4.41': 'name', + '2.5.4.42': 'given_name', + '2.5.4.43': 'initials', + '2.5.4.44': 'generation_qualifier', + '2.5.4.45': 'unique_identifier', + '2.5.4.46': 'dn_qualifier', + '2.5.4.65': 'pseudonym', + '2.5.4.97': 'organization_identifier', + # https://www.trustedcomputinggroup.org/wp-content/uploads/Credential_Profile_EK_V2.0_R14_published.pdf + '2.23.133.2.1': 'tpm_manufacturer', + '2.23.133.2.2': 'tpm_model', + '2.23.133.2.3': 'tpm_version', + '2.23.133.2.4': 'platform_manufacturer', + '2.23.133.2.5': 'platform_model', + '2.23.133.2.6': 'platform_version', + # https://tools.ietf.org/html/rfc2985#page-26 + '1.2.840.113549.1.9.1': 'email_address', + # Page 10 of https://cabforum.org/wp-content/uploads/EV-V1_5_5.pdf + '1.3.6.1.4.1.311.60.2.1.1': 'incorporation_locality', + '1.3.6.1.4.1.311.60.2.1.2': 'incorporation_state_or_province', + '1.3.6.1.4.1.311.60.2.1.3': 'incorporation_country', + # https://tools.ietf.org/html/rfc2247#section-4 + '0.9.2342.19200300.100.1.25': 'domain_component', + # http://www.alvestrand.no/objectid/0.2.262.1.10.7.20.html + '0.2.262.1.10.7.20': 'name_distinguisher', + } + + # This order is largely based on observed order seen in EV certs from + # Symantec and DigiCert. Some of the uncommon name-related fields are + # just placed in what seems like a reasonable order. 
+ preferred_order = [ + 'incorporation_country', + 'incorporation_state_or_province', + 'incorporation_locality', + 'business_category', + 'serial_number', + 'country_name', + 'postal_code', + 'state_or_province_name', + 'locality_name', + 'street_address', + 'organization_name', + 'organizational_unit_name', + 'title', + 'common_name', + 'initials', + 'generation_qualifier', + 'surname', + 'given_name', + 'name', + 'pseudonym', + 'dn_qualifier', + 'telephone_number', + 'email_address', + 'domain_component', + 'name_distinguisher', + 'organization_identifier', + 'tpm_manufacturer', + 'tpm_model', + 'tpm_version', + 'platform_manufacturer', + 'platform_model', + 'platform_version', + ] + + @classmethod + def preferred_ordinal(cls, attr_name): + """ + Returns an ordering value for a particular attribute key. + + Unrecognized attributes and OIDs will be sorted lexically at the end. + + :return: + An orderable value. + + """ + + attr_name = cls.map(attr_name) + if attr_name in cls.preferred_order: + ordinal = cls.preferred_order.index(attr_name) + else: + ordinal = len(cls.preferred_order) + + return (ordinal, attr_name) + + @property + def human_friendly(self): + """ + :return: + A human-friendly unicode string to display to users + """ + + return { + 'common_name': 'Common Name', + 'surname': 'Surname', + 'serial_number': 'Serial Number', + 'country_name': 'Country', + 'locality_name': 'Locality', + 'state_or_province_name': 'State/Province', + 'street_address': 'Street Address', + 'organization_name': 'Organization', + 'organizational_unit_name': 'Organizational Unit', + 'title': 'Title', + 'business_category': 'Business Category', + 'postal_code': 'Postal Code', + 'telephone_number': 'Telephone Number', + 'name': 'Name', + 'given_name': 'Given Name', + 'initials': 'Initials', + 'generation_qualifier': 'Generation Qualifier', + 'unique_identifier': 'Unique Identifier', + 'dn_qualifier': 'DN Qualifier', + 'pseudonym': 'Pseudonym', + 'email_address': 'Email Address', + 'incorporation_locality': 'Incorporation Locality', + 'incorporation_state_or_province': 'Incorporation State/Province', + 'incorporation_country': 'Incorporation Country', + 'domain_component': 'Domain Component', + 'name_distinguisher': 'Name Distinguisher', + 'organization_identifier': 'Organization Identifier', + 'tpm_manufacturer': 'TPM Manufacturer', + 'tpm_model': 'TPM Model', + 'tpm_version': 'TPM Version', + 'platform_manufacturer': 'Platform Manufacturer', + 'platform_model': 'Platform Model', + 'platform_version': 'Platform Version', + }.get(self.native, self.native) + + +class NameTypeAndValue(Sequence): + _fields = [ + ('type', NameType), + ('value', Any), + ] + + _oid_pair = ('type', 'value') + _oid_specs = { + 'common_name': DirectoryString, + 'surname': DirectoryString, + 'serial_number': DirectoryString, + 'country_name': DirectoryString, + 'locality_name': DirectoryString, + 'state_or_province_name': DirectoryString, + 'street_address': DirectoryString, + 'organization_name': DirectoryString, + 'organizational_unit_name': DirectoryString, + 'title': DirectoryString, + 'business_category': DirectoryString, + 'postal_code': DirectoryString, + 'telephone_number': PrintableString, + 'name': DirectoryString, + 'given_name': DirectoryString, + 'initials': DirectoryString, + 'generation_qualifier': DirectoryString, + 'unique_identifier': OctetBitString, + 'dn_qualifier': DirectoryString, + 'pseudonym': DirectoryString, + # https://tools.ietf.org/html/rfc2985#page-26 + 'email_address': EmailAddress, + # Page 10 of 
+        # https://cabforum.org/wp-content/uploads/EV-V1_5_5.pdf
+        'incorporation_locality': DirectoryString,
+        'incorporation_state_or_province': DirectoryString,
+        'incorporation_country': DirectoryString,
+        'domain_component': DNSName,
+        'name_distinguisher': DirectoryString,
+        'organization_identifier': DirectoryString,
+        'tpm_manufacturer': UTF8String,
+        'tpm_model': UTF8String,
+        'tpm_version': UTF8String,
+        'platform_manufacturer': UTF8String,
+        'platform_model': UTF8String,
+        'platform_version': UTF8String,
+    }
+
+    _prepped = None
+
+    @property
+    def prepped_value(self):
+        """
+        Returns the value after being processed by the internationalized string
+        preparation as specified by RFC 5280
+
+        :return:
+            A unicode string
+        """
+
+        if self._prepped is None:
+            self._prepped = self._ldap_string_prep(self['value'].native)
+        return self._prepped
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __eq__(self, other):
+        """
+        Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.1
+
+        :param other:
+            Another NameTypeAndValue object
+
+        :return:
+            A boolean
+        """
+
+        if not isinstance(other, NameTypeAndValue):
+            return False
+
+        if other['type'].native != self['type'].native:
+            return False
+
+        return other.prepped_value == self.prepped_value
+
+    def _ldap_string_prep(self, string):
+        """
+        Implements the internationalized string preparation algorithm from
+        RFC 4518. https://tools.ietf.org/html/rfc4518#section-2
+
+        :param string:
+            A unicode string to prepare
+
+        :return:
+            A prepared unicode string, ready for comparison
+        """
+
+        # Map step
+        string = re.sub('[\u00ad\u1806\u034f\u180b-\u180d\ufe00-\ufe0f\ufffc]+', '', string)
+        string = re.sub('[\u0009\u000a\u000b\u000c\u000d\u0085]', ' ', string)
+        if sys.maxunicode == 0xffff:
+            # Some installs of Python 2.7 don't support 8-digit unicode escape
+            # ranges, so we have to break them into pieces
+            # Original was: \U0001D173-\U0001D17A and \U000E0020-\U000E007F
+            string = re.sub('\ud834[\udd73-\udd7a]|\udb40[\udc20-\udc7f]|\U000e0001', '', string)
+        else:
+            string = re.sub('[\U0001D173-\U0001D17A\U000E0020-\U000E007F\U000e0001]', '', string)
+        string = re.sub(
+            '[\u0000-\u0008\u000e-\u001f\u007f-\u0084\u0086-\u009f\u06dd\u070f\u180e\u200c-\u200f'
+            '\u202a-\u202e\u2060-\u2063\u206a-\u206f\ufeff\ufff9-\ufffb]+',
+            '',
+            string
+        )
+        string = string.replace('\u200b', '')
+        string = re.sub('[\u00a0\u1680\u2000-\u200a\u2028-\u2029\u202f\u205f\u3000]', ' ', string)
+
+        string = ''.join(map(stringprep.map_table_b2, string))
+
+        # Normalize step
+        string = unicodedata.normalize('NFKC', string)
+
+        # Prohibit step
+        for char in string:
+            if stringprep.in_table_a1(char):
+                raise ValueError(unwrap(
+                    '''
+                    X.509 Name objects may not contain unassigned code points
+                    '''
+                ))
+
+            if stringprep.in_table_c8(char):
+                raise ValueError(unwrap(
+                    '''
+                    X.509 Name objects may not contain change display or
+                    deprecated characters
+                    '''
+                ))
+
+            if stringprep.in_table_c3(char):
+                raise ValueError(unwrap(
+                    '''
+                    X.509 Name objects may not contain private use characters
+                    '''
+                ))
+
+            if stringprep.in_table_c4(char):
+                raise ValueError(unwrap(
+                    '''
+                    X.509 Name objects may not contain non-character code points
+                    '''
+                ))
+
+            if stringprep.in_table_c5(char):
+                raise ValueError(unwrap(
+                    '''
+                    X.509 Name objects may not contain surrogate code points
+                    '''
+                ))
+
+            if char == '\ufffd':
+                raise ValueError(unwrap(
+                    '''
+                    X.509 Name objects may not contain the replacement character
+                    '''
+                ))
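+        # (Editor's note) A worked example of the steps so far, using the
+        # hypothetical input u'Foo\u00a0Bar\u200b': the map step turns the
+        # no-break space into a regular space and drops the zero-width space,
+        # case folding via map_table_b2 produces u'foo bar', and NFKC
+        # normalization leaves it unchanged; the steps below then reject
+        # mixed bidirectional text and collapse insignificant spaces.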
+        # Check bidirectional step - here we ensure that we are not mixing
+        # left-to-right and right-to-left text in the string
+        has_r_and_al_cat = False
+        has_l_cat = False
+        for char in string:
+            if stringprep.in_table_d1(char):
+                has_r_and_al_cat = True
+            elif stringprep.in_table_d2(char):
+                has_l_cat = True
+
+        if has_r_and_al_cat:
+            first_is_r_and_al = stringprep.in_table_d1(string[0])
+            last_is_r_and_al = stringprep.in_table_d1(string[-1])
+
+            if has_l_cat or not first_is_r_and_al or not last_is_r_and_al:
+                raise ValueError(unwrap(
+                    '''
+                    X.509 Name object contains a malformed bidirectional
+                    sequence
+                    '''
+                ))
+
+        # Insignificant space handling step
+        string = ' ' + re.sub(' +', ' ', string).strip() + ' '
+
+        return string
+
+
+class RelativeDistinguishedName(SetOf):
+    _child_spec = NameTypeAndValue
+
+    @property
+    def hashable(self):
+        """
+        :return:
+            A unicode string that can be used as a dict key or in a set
+        """
+
+        output = []
+        values = self._get_values(self)
+        for key in sorted(values.keys()):
+            output.append('%s: %s' % (key, values[key]))
+        # Unit separator is used here since the normalization process for
+        # values removes any such character, and the keys are all dotted
+        # integers or under_score_words
+        return '\x1F'.join(output)
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __eq__(self, other):
+        """
+        Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.1
+
+        :param other:
+            Another RelativeDistinguishedName object
+
+        :return:
+            A boolean
+        """
+
+        if not isinstance(other, RelativeDistinguishedName):
+            return False
+
+        if len(self) != len(other):
+            return False
+
+        self_types = self._get_types(self)
+        other_types = self._get_types(other)
+
+        if self_types != other_types:
+            return False
+
+        self_values = self._get_values(self)
+        other_values = self._get_values(other)
+
+        for type_name_ in self_types:
+            if self_values[type_name_] != other_values[type_name_]:
+                return False
+
+        return True
+
+    def _get_types(self, rdn):
+        """
+        Returns a set of types contained in an RDN
+
+        :param rdn:
+            A RelativeDistinguishedName object
+
+        :return:
+            A set object with unicode strings of NameTypeAndValue type field
+            values
+        """
+
+        return set([ntv['type'].native for ntv in rdn])
+
+    def _get_values(self, rdn):
+        """
+        Returns a dict of prepped values contained in an RDN
+
+        :param rdn:
+            A RelativeDistinguishedName object
+
+        :return:
+            A dict object with unicode strings of NameTypeAndValue value field
+            values that have been prepped for comparison
+        """
+
+        output = {}
+        for ntv in rdn:
+            output[ntv['type'].native] = ntv.prepped_value
+        return output
+
+
+class RDNSequence(SequenceOf):
+    _child_spec = RelativeDistinguishedName
+
+    @property
+    def hashable(self):
+        """
+        :return:
+            A unicode string that can be used as a dict key or in a set
+        """
+
+        # Record separator is used here since the normalization process for
+        # values removes any such character, and the keys are all dotted
+        # integers or under_score_words
+        return '\x1E'.join(rdn.hashable for rdn in self)
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __eq__(self, other):
+        """
+        Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.1
+
+        :param other:
+            Another RDNSequence object
+
+        :return:
+            A boolean
+        """
+
+        if not isinstance(other, RDNSequence):
+            return False
+
+        if len(self) != len(other):
+            return False
+
+        for index, self_rdn in enumerate(self):
+            if other[index] != self_rdn:
+                return False
+
+        return True
+
+
+class Name(Choice):
+    _alternatives = [
+        ('', RDNSequence),
+    ]
+
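+    # (Editor's note) A minimal sketch of the comparison semantics defined
+    # above, with hypothetical values - case differences disappear after the
+    # RFC 4518 string prep performed by NameTypeAndValue.prepped_value:
+    #
+    #   name_a = Name.build({'common_name': 'Example'})
+    #   name_b = Name.build({'common_name': 'EXAMPLE'})
+    #   assert name_a == name_b
+    #   assert name_a.hashable == name_b.hashable
+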
+    _human_friendly = None
+    _sha1 = None
+    _sha256 = None
+
+    @classmethod
+    def build(cls, name_dict, use_printable=False):
+        """
+        Creates a Name object from a dict of unicode string keys and values.
+        The keys should be from NameType._map, or a dotted-integer OID unicode
+        string.
+
+        :param name_dict:
+            A dict of name information, e.g. {"common_name": "Will Bond",
+            "country_name": "US", "organization_name": "Codex Non Sufficit LC"}
+
+        :param use_printable:
+            A bool - if PrintableString should be used for encoding instead of
+            UTF8String. This is for backwards compatibility with old software.
+
+        :return:
+            An x509.Name object
+        """
+
+        rdns = []
+        if not use_printable:
+            encoding_name = 'utf8_string'
+            encoding_class = UTF8String
+        else:
+            encoding_name = 'printable_string'
+            encoding_class = PrintableString
+
+        # Sort the attributes according to NameType.preferred_order
+        name_dict = OrderedDict(
+            sorted(
+                name_dict.items(),
+                key=lambda item: NameType.preferred_ordinal(item[0])
+            )
+        )
+
+        for attribute_name, attribute_value in name_dict.items():
+            attribute_name = NameType.map(attribute_name)
+            if attribute_name == 'email_address':
+                value = EmailAddress(attribute_value)
+            elif attribute_name == 'domain_component':
+                value = DNSName(attribute_value)
+            elif attribute_name in set(['dn_qualifier', 'country_name', 'serial_number']):
+                value = DirectoryString(
+                    name='printable_string',
+                    value=PrintableString(attribute_value)
+                )
+            else:
+                value = DirectoryString(
+                    name=encoding_name,
+                    value=encoding_class(attribute_value)
+                )
+
+            rdns.append(RelativeDistinguishedName([
+                NameTypeAndValue({
+                    'type': attribute_name,
+                    'value': value
+                })
+            ]))
+
+        return cls(name='', value=RDNSequence(rdns))
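+    # (Editor's note) A usage sketch for build(), assuming this module is
+    # importable as asn1crypto.x509 and using hypothetical values:
+    #
+    #   from asn1crypto import x509
+    #   subject = x509.Name.build({
+    #       'country_name': 'US',
+    #       'organization_name': 'Codex Non Sufficit LC',
+    #       'common_name': 'Will Bond',
+    #   })
+    #   der_bytes = subject.dump()  # DER encoding, attributes in preferred_order
+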
+    @property
+    def hashable(self):
+        """
+        :return:
+            A unicode string that can be used as a dict key or in a set
+        """
+
+        return self.chosen.hashable
+
+    def __len__(self):
+        return len(self.chosen)
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __eq__(self, other):
+        """
+        Equality as defined by https://tools.ietf.org/html/rfc5280#section-7.1
+
+        :param other:
+            Another Name object
+
+        :return:
+            A boolean
+        """
+
+        if not isinstance(other, Name):
+            return False
+        return self.chosen == other.chosen
+
+    @property
+    def native(self):
+        if self._native is None:
+            self._native = OrderedDict()
+            for rdn in self.chosen.native:
+                for type_val in rdn:
+                    field_name = type_val['type']
+                    if field_name in self._native:
+                        existing = self._native[field_name]
+                        if not isinstance(existing, list):
+                            existing = self._native[field_name] = [existing]
+                        existing.append(type_val['value'])
+                    else:
+                        self._native[field_name] = type_val['value']
+        return self._native
+
+    @property
+    def human_friendly(self):
+        """
+        :return:
+            A human-friendly unicode string containing the parts of the name
+        """
+
+        if self._human_friendly is None:
+            data = OrderedDict()
+            last_field = None
+            for rdn in self.chosen:
+                for type_val in rdn:
+                    field_name = type_val['type'].human_friendly
+                    last_field = field_name
+                    if field_name in data:
+                        data[field_name] = [data[field_name]]
+                        data[field_name].append(type_val['value'])
+                    else:
+                        data[field_name] = type_val['value']
+            to_join = []
+            keys = data.keys()
+            if last_field == 'Country':
+                keys = reversed(list(keys))
+            for key in keys:
+                value = data[key]
+                native_value = self._recursive_humanize(value)
+                to_join.append('%s: %s' % (key, native_value))
+
+            has_comma = False
+            for element in to_join:
+                if element.find(',') != -1:
+                    has_comma = True
+                    break
+
+            separator = ', ' if not has_comma else '; '
+            self._human_friendly = separator.join(to_join[::-1])
+
+        return self._human_friendly
+
+    def _recursive_humanize(self, value):
+        """
+        Recursively serializes data compiled from the RDNSequence
+
+        :param value:
+            An Asn1Value object, or a list of Asn1Value objects
+
+        :return:
+            A unicode string
+        """
+
+        if isinstance(value, list):
+            return ', '.join(
+                reversed([self._recursive_humanize(sub_value) for sub_value in value])
+            )
+        return value.native
+
+    @property
+    def sha1(self):
+        """
+        :return:
+            The SHA1 hash of the DER-encoded bytes of this name
+        """
+
+        if self._sha1 is None:
+            self._sha1 = hashlib.sha1(self.dump()).digest()
+        return self._sha1
+
+    @property
+    def sha256(self):
+        """
+        :return:
+            The SHA-256 hash of the DER-encoded bytes of this name
+        """
+
+        if self._sha256 is None:
+            self._sha256 = hashlib.sha256(self.dump()).digest()
+        return self._sha256
+
+
+class AnotherName(Sequence):
+    _fields = [
+        ('type_id', ObjectIdentifier),
+        ('value', Any, {'explicit': 0}),
+    ]
+
+
+class CountryName(Choice):
+    class_ = 1
+    tag = 1
+
+    _alternatives = [
+        ('x121_dcc_code', NumericString),
+        ('iso_3166_alpha2_code', PrintableString),
+    ]
+
+
+class AdministrationDomainName(Choice):
+    class_ = 1
+    tag = 2
+
+    _alternatives = [
+        ('numeric', NumericString),
+        ('printable', PrintableString),
+    ]
+
+
+class PrivateDomainName(Choice):
+    _alternatives = [
+        ('numeric', NumericString),
+        ('printable', PrintableString),
+    ]
+
+
+class PersonalName(Set):
+    _fields = [
+        ('surname', PrintableString, {'implicit': 0}),
+        ('given_name', PrintableString, {'implicit': 1, 'optional': True}),
+        ('initials', PrintableString, {'implicit': 2, 'optional': True}),
+        ('generation_qualifier', PrintableString, {'implicit': 3, 'optional': True}),
+    ]
+
+
+class TeletexPersonalName(Set):
+    _fields = [
+        ('surname', TeletexString, {'implicit': 0}),
+        ('given_name', TeletexString, {'implicit': 1, 'optional': True}),
+        ('initials', TeletexString, {'implicit': 2, 'optional': True}),
+        ('generation_qualifier', TeletexString, {'implicit': 3, 'optional': True}),
+    ]
+
+
+class OrganizationalUnitNames(SequenceOf):
+    _child_spec = PrintableString
+
+
+class TeletexOrganizationalUnitNames(SequenceOf):
+    _child_spec = TeletexString
+
+
+class BuiltInStandardAttributes(Sequence):
+    _fields = [
+        ('country_name', CountryName, {'optional': True}),
+        ('administration_domain_name', AdministrationDomainName, {'optional': True}),
+        ('network_address', NumericString, {'implicit': 0, 'optional': True}),
+        ('terminal_identifier', PrintableString, {'implicit': 1, 'optional': True}),
+        ('private_domain_name', PrivateDomainName, {'explicit': 2, 'optional': True}),
+        ('organization_name', PrintableString, {'implicit': 3, 'optional': True}),
+        ('numeric_user_identifier', NumericString, {'implicit': 4, 'optional': True}),
+        ('personal_name', PersonalName, {'implicit': 5, 'optional': True}),
+        ('organizational_unit_names', OrganizationalUnitNames, {'implicit': 6, 'optional': True}),
+    ]
+
+
+class BuiltInDomainDefinedAttribute(Sequence):
+    _fields = [
+        ('type', PrintableString),
+        ('value', PrintableString),
+    ]
+
+
+class BuiltInDomainDefinedAttributes(SequenceOf):
+    _child_spec = BuiltInDomainDefinedAttribute
+
+
+class TeletexDomainDefinedAttribute(Sequence):
+    _fields = [
+        ('type', TeletexString),
+        ('value', TeletexString),
+    ]
+
+
+class TeletexDomainDefinedAttributes(SequenceOf):
+    _child_spec = TeletexDomainDefinedAttribute
+
+
+class PhysicalDeliveryCountryName(Choice):
+    _alternatives = [
('x121_dcc_code', NumericString), + ('iso_3166_alpha2_code', PrintableString), + ] + + +class PostalCode(Choice): + _alternatives = [ + ('numeric_code', NumericString), + ('printable_code', PrintableString), + ] + + +class PDSParameter(Set): + _fields = [ + ('printable_string', PrintableString, {'optional': True}), + ('teletex_string', TeletexString, {'optional': True}), + ] + + +class PrintableAddress(SequenceOf): + _child_spec = PrintableString + + +class UnformattedPostalAddress(Set): + _fields = [ + ('printable_address', PrintableAddress, {'optional': True}), + ('teletex_string', TeletexString, {'optional': True}), + ] + + +class E1634Address(Sequence): + _fields = [ + ('number', NumericString, {'implicit': 0}), + ('sub_address', NumericString, {'implicit': 1, 'optional': True}), + ] + + +class NAddresses(SetOf): + _child_spec = OctetString + + +class PresentationAddress(Sequence): + _fields = [ + ('p_selector', OctetString, {'explicit': 0, 'optional': True}), + ('s_selector', OctetString, {'explicit': 1, 'optional': True}), + ('t_selector', OctetString, {'explicit': 2, 'optional': True}), + ('n_addresses', NAddresses, {'explicit': 3}), + ] + + +class ExtendedNetworkAddress(Choice): + _alternatives = [ + ('e163_4_address', E1634Address), + ('psap_address', PresentationAddress, {'implicit': 0}) + ] + + +class TerminalType(Integer): + _map = { + 3: 'telex', + 4: 'teletex', + 5: 'g3_facsimile', + 6: 'g4_facsimile', + 7: 'ia5_terminal', + 8: 'videotex', + } + + +class ExtensionAttributeType(Integer): + _map = { + 1: 'common_name', + 2: 'teletex_common_name', + 3: 'teletex_organization_name', + 4: 'teletex_personal_name', + 5: 'teletex_organization_unit_names', + 6: 'teletex_domain_defined_attributes', + 7: 'pds_name', + 8: 'physical_delivery_country_name', + 9: 'postal_code', + 10: 'physical_delivery_office_name', + 11: 'physical_delivery_office_number', + 12: 'extension_of_address_components', + 13: 'physical_delivery_personal_name', + 14: 'physical_delivery_organization_name', + 15: 'extension_physical_delivery_address_components', + 16: 'unformatted_postal_address', + 17: 'street_address', + 18: 'post_office_box_address', + 19: 'poste_restante_address', + 20: 'unique_postal_name', + 21: 'local_postal_attributes', + 22: 'extended_network_address', + 23: 'terminal_type', + } + + +class ExtensionAttribute(Sequence): + _fields = [ + ('extension_attribute_type', ExtensionAttributeType, {'implicit': 0}), + ('extension_attribute_value', Any, {'explicit': 1}), + ] + + _oid_pair = ('extension_attribute_type', 'extension_attribute_value') + _oid_specs = { + 'common_name': PrintableString, + 'teletex_common_name': TeletexString, + 'teletex_organization_name': TeletexString, + 'teletex_personal_name': TeletexPersonalName, + 'teletex_organization_unit_names': TeletexOrganizationalUnitNames, + 'teletex_domain_defined_attributes': TeletexDomainDefinedAttributes, + 'pds_name': PrintableString, + 'physical_delivery_country_name': PhysicalDeliveryCountryName, + 'postal_code': PostalCode, + 'physical_delivery_office_name': PDSParameter, + 'physical_delivery_office_number': PDSParameter, + 'extension_of_address_components': PDSParameter, + 'physical_delivery_personal_name': PDSParameter, + 'physical_delivery_organization_name': PDSParameter, + 'extension_physical_delivery_address_components': PDSParameter, + 'unformatted_postal_address': UnformattedPostalAddress, + 'street_address': PDSParameter, + 'post_office_box_address': PDSParameter, + 'poste_restante_address': PDSParameter, + 'unique_postal_name': 
PDSParameter, + 'local_postal_attributes': PDSParameter, + 'extended_network_address': ExtendedNetworkAddress, + 'terminal_type': TerminalType, + } + + +class ExtensionAttributes(SequenceOf): + _child_spec = ExtensionAttribute + + +class ORAddress(Sequence): + _fields = [ + ('built_in_standard_attributes', BuiltInStandardAttributes), + ('built_in_domain_defined_attributes', BuiltInDomainDefinedAttributes, {'optional': True}), + ('extension_attributes', ExtensionAttributes, {'optional': True}), + ] + + +class EDIPartyName(Sequence): + _fields = [ + ('name_assigner', DirectoryString, {'implicit': 0, 'optional': True}), + ('party_name', DirectoryString, {'implicit': 1}), + ] + + +class GeneralName(Choice): + _alternatives = [ + ('other_name', AnotherName, {'implicit': 0}), + ('rfc822_name', EmailAddress, {'implicit': 1}), + ('dns_name', DNSName, {'implicit': 2}), + ('x400_address', ORAddress, {'implicit': 3}), + ('directory_name', Name, {'explicit': 4}), + ('edi_party_name', EDIPartyName, {'implicit': 5}), + ('uniform_resource_identifier', URI, {'implicit': 6}), + ('ip_address', IPAddress, {'implicit': 7}), + ('registered_id', ObjectIdentifier, {'implicit': 8}), + ] + + def __ne__(self, other): + return not self == other + + def __eq__(self, other): + """ + Does not support other_name, x400_address or edi_party_name + + :param other: + The other GeneralName to compare to + + :return: + A boolean + """ + + if self.name in ('other_name', 'x400_address', 'edi_party_name'): + raise ValueError(unwrap( + ''' + Comparison is not supported for GeneralName objects of + choice %s + ''', + self.name + )) + + if other.name in ('other_name', 'x400_address', 'edi_party_name'): + raise ValueError(unwrap( + ''' + Comparison is not supported for GeneralName objects of choice + %s''', + other.name + )) + + if self.name != other.name: + return False + + return self.chosen == other.chosen + + +class GeneralNames(SequenceOf): + _child_spec = GeneralName + + +class Time(Choice): + _alternatives = [ + ('utc_time', UTCTime), + ('general_time', GeneralizedTime), + ] + + +class Validity(Sequence): + _fields = [ + ('not_before', Time), + ('not_after', Time), + ] + + +class BasicConstraints(Sequence): + _fields = [ + ('ca', Boolean, {'default': False}), + ('path_len_constraint', Integer, {'optional': True}), + ] + + +class AuthorityKeyIdentifier(Sequence): + _fields = [ + ('key_identifier', OctetString, {'implicit': 0, 'optional': True}), + ('authority_cert_issuer', GeneralNames, {'implicit': 1, 'optional': True}), + ('authority_cert_serial_number', Integer, {'implicit': 2, 'optional': True}), + ] + + +class DistributionPointName(Choice): + _alternatives = [ + ('full_name', GeneralNames, {'implicit': 0}), + ('name_relative_to_crl_issuer', RelativeDistinguishedName, {'implicit': 1}), + ] + + +class ReasonFlags(BitString): + _map = { + 0: 'unused', + 1: 'key_compromise', + 2: 'ca_compromise', + 3: 'affiliation_changed', + 4: 'superseded', + 5: 'cessation_of_operation', + 6: 'certificate_hold', + 7: 'privilege_withdrawn', + 8: 'aa_compromise', + } + + +class GeneralSubtree(Sequence): + _fields = [ + ('base', GeneralName), + ('minimum', Integer, {'implicit': 0, 'default': 0}), + ('maximum', Integer, {'implicit': 1, 'optional': True}), + ] + + +class GeneralSubtrees(SequenceOf): + _child_spec = GeneralSubtree + + +class NameConstraints(Sequence): + _fields = [ + ('permitted_subtrees', GeneralSubtrees, {'implicit': 0, 'optional': True}), + ('excluded_subtrees', GeneralSubtrees, {'implicit': 1, 'optional': True}), + ] + + 
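+# (Editor's note) A short sketch of GeneralName comparison, with hypothetical
+# values. Equality is delegated to the chosen alternative, and comparing the
+# other_name, x400_address or edi_party_name choices raises ValueError:
+#
+#   a = GeneralName(name='dns_name', value=DNSName('example.com'))
+#   b = GeneralName(name='dns_name', value=DNSName('example.com'))
+#   assert a == b
+
+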
+class DistributionPoint(Sequence): + _fields = [ + ('distribution_point', DistributionPointName, {'explicit': 0, 'optional': True}), + ('reasons', ReasonFlags, {'implicit': 1, 'optional': True}), + ('crl_issuer', GeneralNames, {'implicit': 2, 'optional': True}), + ] + + _url = False + + @property + def url(self): + """ + :return: + None or a unicode string of the distribution point's URL + """ + + if self._url is False: + self._url = None + name = self['distribution_point'] + if name.name != 'full_name': + raise ValueError(unwrap( + ''' + CRL distribution points that are relative to the issuer are + not supported + ''' + )) + + for general_name in name.chosen: + if general_name.name == 'uniform_resource_identifier': + url = general_name.native + if url.lower().startswith(('http://', 'https://', 'ldap://', 'ldaps://')): + self._url = url + break + + return self._url + + +class CRLDistributionPoints(SequenceOf): + _child_spec = DistributionPoint + + +class DisplayText(Choice): + _alternatives = [ + ('ia5_string', IA5String), + ('visible_string', VisibleString), + ('bmp_string', BMPString), + ('utf8_string', UTF8String), + ] + + +class NoticeNumbers(SequenceOf): + _child_spec = Integer + + +class NoticeReference(Sequence): + _fields = [ + ('organization', DisplayText), + ('notice_numbers', NoticeNumbers), + ] + + +class UserNotice(Sequence): + _fields = [ + ('notice_ref', NoticeReference, {'optional': True}), + ('explicit_text', DisplayText, {'optional': True}), + ] + + +class PolicyQualifierId(ObjectIdentifier): + _map = { + '1.3.6.1.5.5.7.2.1': 'certification_practice_statement', + '1.3.6.1.5.5.7.2.2': 'user_notice', + } + + +class PolicyQualifierInfo(Sequence): + _fields = [ + ('policy_qualifier_id', PolicyQualifierId), + ('qualifier', Any), + ] + + _oid_pair = ('policy_qualifier_id', 'qualifier') + _oid_specs = { + 'certification_practice_statement': IA5String, + 'user_notice': UserNotice, + } + + +class PolicyQualifierInfos(SequenceOf): + _child_spec = PolicyQualifierInfo + + +class PolicyIdentifier(ObjectIdentifier): + _map = { + '2.5.29.32.0': 'any_policy', + } + + +class PolicyInformation(Sequence): + _fields = [ + ('policy_identifier', PolicyIdentifier), + ('policy_qualifiers', PolicyQualifierInfos, {'optional': True}) + ] + + +class CertificatePolicies(SequenceOf): + _child_spec = PolicyInformation + + +class PolicyMapping(Sequence): + _fields = [ + ('issuer_domain_policy', PolicyIdentifier), + ('subject_domain_policy', PolicyIdentifier), + ] + + +class PolicyMappings(SequenceOf): + _child_spec = PolicyMapping + + +class PolicyConstraints(Sequence): + _fields = [ + ('require_explicit_policy', Integer, {'implicit': 0, 'optional': True}), + ('inhibit_policy_mapping', Integer, {'implicit': 1, 'optional': True}), + ] + + +class KeyPurposeId(ObjectIdentifier): + _map = { + # https://tools.ietf.org/html/rfc5280#page-45 + '2.5.29.37.0': 'any_extended_key_usage', + '1.3.6.1.5.5.7.3.1': 'server_auth', + '1.3.6.1.5.5.7.3.2': 'client_auth', + '1.3.6.1.5.5.7.3.3': 'code_signing', + '1.3.6.1.5.5.7.3.4': 'email_protection', + '1.3.6.1.5.5.7.3.5': 'ipsec_end_system', + '1.3.6.1.5.5.7.3.6': 'ipsec_tunnel', + '1.3.6.1.5.5.7.3.7': 'ipsec_user', + '1.3.6.1.5.5.7.3.8': 'time_stamping', + '1.3.6.1.5.5.7.3.9': 'ocsp_signing', + # http://tools.ietf.org/html/rfc3029.html#page-9 + '1.3.6.1.5.5.7.3.10': 'dvcs', + # http://tools.ietf.org/html/rfc6268.html#page-16 + '1.3.6.1.5.5.7.3.13': 'eap_over_ppp', + '1.3.6.1.5.5.7.3.14': 'eap_over_lan', + # https://tools.ietf.org/html/rfc5055#page-76 + 
'1.3.6.1.5.5.7.3.15': 'scvp_server', + '1.3.6.1.5.5.7.3.16': 'scvp_client', + # https://tools.ietf.org/html/rfc4945#page-31 + '1.3.6.1.5.5.7.3.17': 'ipsec_ike', + # https://tools.ietf.org/html/rfc5415#page-38 + '1.3.6.1.5.5.7.3.18': 'capwap_ac', + '1.3.6.1.5.5.7.3.19': 'capwap_wtp', + # https://tools.ietf.org/html/rfc5924#page-8 + '1.3.6.1.5.5.7.3.20': 'sip_domain', + # https://tools.ietf.org/html/rfc6187#page-7 + '1.3.6.1.5.5.7.3.21': 'secure_shell_client', + '1.3.6.1.5.5.7.3.22': 'secure_shell_server', + # https://tools.ietf.org/html/rfc6494#page-7 + '1.3.6.1.5.5.7.3.23': 'send_router', + '1.3.6.1.5.5.7.3.24': 'send_proxied_router', + '1.3.6.1.5.5.7.3.25': 'send_owner', + '1.3.6.1.5.5.7.3.26': 'send_proxied_owner', + # https://tools.ietf.org/html/rfc6402#page-10 + '1.3.6.1.5.5.7.3.27': 'cmc_ca', + '1.3.6.1.5.5.7.3.28': 'cmc_ra', + '1.3.6.1.5.5.7.3.29': 'cmc_archive', + # https://tools.ietf.org/html/draft-ietf-sidr-bgpsec-pki-profiles-15#page-6 + '1.3.6.1.5.5.7.3.30': 'bgpspec_router', + # https://msdn.microsoft.com/en-us/library/windows/desktop/aa378132(v=vs.85).aspx + # and https://support.microsoft.com/en-us/kb/287547 + '1.3.6.1.4.1.311.10.3.1': 'microsoft_trust_list_signing', + '1.3.6.1.4.1.311.10.3.2': 'microsoft_time_stamp_signing', + '1.3.6.1.4.1.311.10.3.3': 'microsoft_server_gated', + '1.3.6.1.4.1.311.10.3.3.1': 'microsoft_serialized', + '1.3.6.1.4.1.311.10.3.4': 'microsoft_efs', + '1.3.6.1.4.1.311.10.3.4.1': 'microsoft_efs_recovery', + '1.3.6.1.4.1.311.10.3.5': 'microsoft_whql', + '1.3.6.1.4.1.311.10.3.6': 'microsoft_nt5', + '1.3.6.1.4.1.311.10.3.7': 'microsoft_oem_whql', + '1.3.6.1.4.1.311.10.3.8': 'microsoft_embedded_nt', + '1.3.6.1.4.1.311.10.3.9': 'microsoft_root_list_signer', + '1.3.6.1.4.1.311.10.3.10': 'microsoft_qualified_subordination', + '1.3.6.1.4.1.311.10.3.11': 'microsoft_key_recovery', + '1.3.6.1.4.1.311.10.3.12': 'microsoft_document_signing', + '1.3.6.1.4.1.311.10.3.13': 'microsoft_lifetime_signing', + '1.3.6.1.4.1.311.10.3.14': 'microsoft_mobile_device_software', + # https://support.microsoft.com/en-us/help/287547/object-ids-associated-with-microsoft-cryptography + '1.3.6.1.4.1.311.20.2.2': 'microsoft_smart_card_logon', + # https://opensource.apple.com/source + # - /Security/Security-57031.40.6/Security/libsecurity_keychain/lib/SecPolicy.cpp + # - /libsecurity_cssm/libsecurity_cssm-36064/lib/oidsalg.c + '1.2.840.113635.100.1.2': 'apple_x509_basic', + '1.2.840.113635.100.1.3': 'apple_ssl', + '1.2.840.113635.100.1.4': 'apple_local_cert_gen', + '1.2.840.113635.100.1.5': 'apple_csr_gen', + '1.2.840.113635.100.1.6': 'apple_revocation_crl', + '1.2.840.113635.100.1.7': 'apple_revocation_ocsp', + '1.2.840.113635.100.1.8': 'apple_smime', + '1.2.840.113635.100.1.9': 'apple_eap', + '1.2.840.113635.100.1.10': 'apple_software_update_signing', + '1.2.840.113635.100.1.11': 'apple_ipsec', + '1.2.840.113635.100.1.12': 'apple_ichat', + '1.2.840.113635.100.1.13': 'apple_resource_signing', + '1.2.840.113635.100.1.14': 'apple_pkinit_client', + '1.2.840.113635.100.1.15': 'apple_pkinit_server', + '1.2.840.113635.100.1.16': 'apple_code_signing', + '1.2.840.113635.100.1.17': 'apple_package_signing', + '1.2.840.113635.100.1.18': 'apple_id_validation', + '1.2.840.113635.100.1.20': 'apple_time_stamping', + '1.2.840.113635.100.1.21': 'apple_revocation', + '1.2.840.113635.100.1.22': 'apple_passbook_signing', + '1.2.840.113635.100.1.23': 'apple_mobile_store', + '1.2.840.113635.100.1.24': 'apple_escrow_service', + '1.2.840.113635.100.1.25': 'apple_profile_signer', + '1.2.840.113635.100.1.26': 
'apple_qa_profile_signer', + '1.2.840.113635.100.1.27': 'apple_test_mobile_store', + '1.2.840.113635.100.1.28': 'apple_otapki_signer', + '1.2.840.113635.100.1.29': 'apple_test_otapki_signer', + '1.2.840.113625.100.1.30': 'apple_id_validation_record_signing_policy', + '1.2.840.113625.100.1.31': 'apple_smp_encryption', + '1.2.840.113625.100.1.32': 'apple_test_smp_encryption', + '1.2.840.113635.100.1.33': 'apple_server_authentication', + '1.2.840.113635.100.1.34': 'apple_pcs_escrow_service', + # http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.201-2.pdf + '2.16.840.1.101.3.6.8': 'piv_card_authentication', + '2.16.840.1.101.3.6.7': 'piv_content_signing', + # https://tools.ietf.org/html/rfc4556.html + '1.3.6.1.5.2.3.4': 'pkinit_kpclientauth', + '1.3.6.1.5.2.3.5': 'pkinit_kpkdc', + # https://www.adobe.com/devnet-docs/acrobatetk/tools/DigSig/changes.html + '1.2.840.113583.1.1.5': 'adobe_authentic_documents_trust', + # https://www.idmanagement.gov/wp-content/uploads/sites/1171/uploads/fpki-pivi-cert-profiles.pdf + '2.16.840.1.101.3.8.7': 'fpki_pivi_content_signing' + } + + +class ExtKeyUsageSyntax(SequenceOf): + _child_spec = KeyPurposeId + + +class AccessMethod(ObjectIdentifier): + _map = { + '1.3.6.1.5.5.7.48.1': 'ocsp', + '1.3.6.1.5.5.7.48.2': 'ca_issuers', + '1.3.6.1.5.5.7.48.3': 'time_stamping', + '1.3.6.1.5.5.7.48.5': 'ca_repository', + } + + +class AccessDescription(Sequence): + _fields = [ + ('access_method', AccessMethod), + ('access_location', GeneralName), + ] + + +class AuthorityInfoAccessSyntax(SequenceOf): + _child_spec = AccessDescription + + +class SubjectInfoAccessSyntax(SequenceOf): + _child_spec = AccessDescription + + +# https://tools.ietf.org/html/rfc7633 +class Features(SequenceOf): + _child_spec = Integer + + +class EntrustVersionInfo(Sequence): + _fields = [ + ('entrust_vers', GeneralString), + ('entrust_info_flags', BitString) + ] + + +class NetscapeCertificateType(BitString): + _map = { + 0: 'ssl_client', + 1: 'ssl_server', + 2: 'email', + 3: 'object_signing', + 4: 'reserved', + 5: 'ssl_ca', + 6: 'email_ca', + 7: 'object_signing_ca', + } + + +class Version(Integer): + _map = { + 0: 'v1', + 1: 'v2', + 2: 'v3', + } + + +class TPMSpecification(Sequence): + _fields = [ + ('family', UTF8String), + ('level', Integer), + ('revision', Integer), + ] + + +class SetOfTPMSpecification(SetOf): + _child_spec = TPMSpecification + + +class TCGSpecificationVersion(Sequence): + _fields = [ + ('major_version', Integer), + ('minor_version', Integer), + ('revision', Integer), + ] + + +class TCGPlatformSpecification(Sequence): + _fields = [ + ('version', TCGSpecificationVersion), + ('platform_class', OctetString), + ] + + +class SetOfTCGPlatformSpecification(SetOf): + _child_spec = TCGPlatformSpecification + + +class EKGenerationType(Enumerated): + _map = { + 0: 'internal', + 1: 'injected', + 2: 'internal_revocable', + 3: 'injected_revocable', + } + + +class EKGenerationLocation(Enumerated): + _map = { + 0: 'tpm_manufacturer', + 1: 'platform_manufacturer', + 2: 'ek_cert_signer', + } + + +class EKCertificateGenerationLocation(Enumerated): + _map = { + 0: 'tpm_manufacturer', + 1: 'platform_manufacturer', + 2: 'ek_cert_signer', + } + + +class EvaluationAssuranceLevel(Enumerated): + _map = { + 1: 'level1', + 2: 'level2', + 3: 'level3', + 4: 'level4', + 5: 'level5', + 6: 'level6', + 7: 'level7', + } + + +class EvaluationStatus(Enumerated): + _map = { + 0: 'designed_to_meet', + 1: 'evaluation_in_progress', + 2: 'evaluation_completed', + } + + +class StrengthOfFunction(Enumerated): + _map = { + 0: 
'basic', + 1: 'medium', + 2: 'high', + } + + +class URIReference(Sequence): + _fields = [ + ('uniform_resource_identifier', IA5String), + ('hash_algorithm', DigestAlgorithm, {'optional': True}), + ('hash_value', BitString, {'optional': True}), + ] + + +class CommonCriteriaMeasures(Sequence): + _fields = [ + ('version', IA5String), + ('assurance_level', EvaluationAssuranceLevel), + ('evaluation_status', EvaluationStatus), + ('plus', Boolean, {'default': False}), + ('strengh_of_function', StrengthOfFunction, {'implicit': 0, 'optional': True}), + ('profile_oid', ObjectIdentifier, {'implicit': 1, 'optional': True}), + ('profile_url', URIReference, {'implicit': 2, 'optional': True}), + ('target_oid', ObjectIdentifier, {'implicit': 3, 'optional': True}), + ('target_uri', URIReference, {'implicit': 4, 'optional': True}), + ] + + +class SecurityLevel(Enumerated): + _map = { + 1: 'level1', + 2: 'level2', + 3: 'level3', + 4: 'level4', + } + + +class FIPSLevel(Sequence): + _fields = [ + ('version', IA5String), + ('level', SecurityLevel), + ('plus', Boolean, {'default': False}), + ] + + +class TPMSecurityAssertions(Sequence): + _fields = [ + ('version', Version, {'default': 'v1'}), + ('field_upgradable', Boolean, {'default': False}), + ('ek_generation_type', EKGenerationType, {'implicit': 0, 'optional': True}), + ('ek_generation_location', EKGenerationLocation, {'implicit': 1, 'optional': True}), + ('ek_certificate_generation_location', EKCertificateGenerationLocation, {'implicit': 2, 'optional': True}), + ('cc_info', CommonCriteriaMeasures, {'implicit': 3, 'optional': True}), + ('fips_level', FIPSLevel, {'implicit': 4, 'optional': True}), + ('iso_9000_certified', Boolean, {'implicit': 5, 'default': False}), + ('iso_9000_uri', IA5String, {'optional': True}), + ] + + +class SetOfTPMSecurityAssertions(SetOf): + _child_spec = TPMSecurityAssertions + + +class SubjectDirectoryAttributeId(ObjectIdentifier): + _map = { + # https://tools.ietf.org/html/rfc2256#page-11 + '2.5.4.52': 'supported_algorithms', + # https://www.trustedcomputinggroup.org/wp-content/uploads/Credential_Profile_EK_V2.0_R14_published.pdf + '2.23.133.2.16': 'tpm_specification', + '2.23.133.2.17': 'tcg_platform_specification', + '2.23.133.2.18': 'tpm_security_assertions', + # https://tools.ietf.org/html/rfc3739#page-18 + '1.3.6.1.5.5.7.9.1': 'pda_date_of_birth', + '1.3.6.1.5.5.7.9.2': 'pda_place_of_birth', + '1.3.6.1.5.5.7.9.3': 'pda_gender', + '1.3.6.1.5.5.7.9.4': 'pda_country_of_citizenship', + '1.3.6.1.5.5.7.9.5': 'pda_country_of_residence', + # https://holtstrom.com/michael/tools/asn1decoder.php + '1.2.840.113533.7.68.29': 'entrust_user_role', + } + + +class SetOfGeneralizedTime(SetOf): + _child_spec = GeneralizedTime + + +class SetOfDirectoryString(SetOf): + _child_spec = DirectoryString + + +class SetOfPrintableString(SetOf): + _child_spec = PrintableString + + +class SupportedAlgorithm(Sequence): + _fields = [ + ('algorithm_identifier', AnyAlgorithmIdentifier), + ('intended_usage', KeyUsage, {'explicit': 0, 'optional': True}), + ('intended_certificate_policies', CertificatePolicies, {'explicit': 1, 'optional': True}), + ] + + +class SetOfSupportedAlgorithm(SetOf): + _child_spec = SupportedAlgorithm + + +class SubjectDirectoryAttribute(Sequence): + _fields = [ + ('type', SubjectDirectoryAttributeId), + ('values', Any), + ] + + _oid_pair = ('type', 'values') + _oid_specs = { + 'supported_algorithms': SetOfSupportedAlgorithm, + 'tpm_specification': SetOfTPMSpecification, + 'tcg_platform_specification': SetOfTCGPlatformSpecification, + 
'tpm_security_assertions': SetOfTPMSecurityAssertions, + 'pda_date_of_birth': SetOfGeneralizedTime, + 'pda_place_of_birth': SetOfDirectoryString, + 'pda_gender': SetOfPrintableString, + 'pda_country_of_citizenship': SetOfPrintableString, + 'pda_country_of_residence': SetOfPrintableString, + } + + def _values_spec(self): + type_ = self['type'].native + if type_ in self._oid_specs: + return self._oid_specs[type_] + return SetOf + + _spec_callbacks = { + 'values': _values_spec + } + + +class SubjectDirectoryAttributes(SequenceOf): + _child_spec = SubjectDirectoryAttribute + + +class ExtensionId(ObjectIdentifier): + _map = { + '2.5.29.9': 'subject_directory_attributes', + '2.5.29.14': 'key_identifier', + '2.5.29.15': 'key_usage', + '2.5.29.16': 'private_key_usage_period', + '2.5.29.17': 'subject_alt_name', + '2.5.29.18': 'issuer_alt_name', + '2.5.29.19': 'basic_constraints', + '2.5.29.30': 'name_constraints', + '2.5.29.31': 'crl_distribution_points', + '2.5.29.32': 'certificate_policies', + '2.5.29.33': 'policy_mappings', + '2.5.29.35': 'authority_key_identifier', + '2.5.29.36': 'policy_constraints', + '2.5.29.37': 'extended_key_usage', + '2.5.29.46': 'freshest_crl', + '2.5.29.54': 'inhibit_any_policy', + '1.3.6.1.5.5.7.1.1': 'authority_information_access', + '1.3.6.1.5.5.7.1.11': 'subject_information_access', + # https://tools.ietf.org/html/rfc7633 + '1.3.6.1.5.5.7.1.24': 'tls_feature', + '1.3.6.1.5.5.7.48.1.5': 'ocsp_no_check', + '1.2.840.113533.7.65.0': 'entrust_version_extension', + '2.16.840.1.113730.1.1': 'netscape_certificate_type', + # https://tools.ietf.org/html/rfc6962.html#page-14 + '1.3.6.1.4.1.11129.2.4.2': 'signed_certificate_timestamp_list', + } + + +class Extension(Sequence): + _fields = [ + ('extn_id', ExtensionId), + ('critical', Boolean, {'default': False}), + ('extn_value', ParsableOctetString), + ] + + _oid_pair = ('extn_id', 'extn_value') + _oid_specs = { + 'subject_directory_attributes': SubjectDirectoryAttributes, + 'key_identifier': OctetString, + 'key_usage': KeyUsage, + 'private_key_usage_period': PrivateKeyUsagePeriod, + 'subject_alt_name': GeneralNames, + 'issuer_alt_name': GeneralNames, + 'basic_constraints': BasicConstraints, + 'name_constraints': NameConstraints, + 'crl_distribution_points': CRLDistributionPoints, + 'certificate_policies': CertificatePolicies, + 'policy_mappings': PolicyMappings, + 'authority_key_identifier': AuthorityKeyIdentifier, + 'policy_constraints': PolicyConstraints, + 'extended_key_usage': ExtKeyUsageSyntax, + 'freshest_crl': CRLDistributionPoints, + 'inhibit_any_policy': Integer, + 'authority_information_access': AuthorityInfoAccessSyntax, + 'subject_information_access': SubjectInfoAccessSyntax, + 'tls_feature': Features, + 'ocsp_no_check': Null, + 'entrust_version_extension': EntrustVersionInfo, + 'netscape_certificate_type': NetscapeCertificateType, + 'signed_certificate_timestamp_list': OctetString, + } + + +class Extensions(SequenceOf): + _child_spec = Extension + + +class TbsCertificate(Sequence): + _fields = [ + ('version', Version, {'explicit': 0, 'default': 'v1'}), + ('serial_number', Integer), + ('signature', SignedDigestAlgorithm), + ('issuer', Name), + ('validity', Validity), + ('subject', Name), + ('subject_public_key_info', PublicKeyInfo), + ('issuer_unique_id', OctetBitString, {'implicit': 1, 'optional': True}), + ('subject_unique_id', OctetBitString, {'implicit': 2, 'optional': True}), + ('extensions', Extensions, {'explicit': 3, 'optional': True}), + ] + + +class Certificate(Sequence): + _fields = [ + 
+        ('tbs_certificate', TbsCertificate),
+        ('signature_algorithm', SignedDigestAlgorithm),
+        ('signature_value', OctetBitString),
+    ]
+
+    _processed_extensions = False
+    _critical_extensions = None
+    # Named with the _value suffix so that _set_extensions() below can
+    # populate it via setattr(), like the other extension attributes
+    _subject_directory_attributes_value = None
+    _key_identifier_value = None
+    _key_usage_value = None
+    _subject_alt_name_value = None
+    _issuer_alt_name_value = None
+    _basic_constraints_value = None
+    _name_constraints_value = None
+    _crl_distribution_points_value = None
+    _certificate_policies_value = None
+    _policy_mappings_value = None
+    _authority_key_identifier_value = None
+    _policy_constraints_value = None
+    _freshest_crl_value = None
+    _inhibit_any_policy_value = None
+    _extended_key_usage_value = None
+    _authority_information_access_value = None
+    _subject_information_access_value = None
+    _private_key_usage_period_value = None
+    _tls_feature_value = None
+    _ocsp_no_check_value = None
+    _issuer_serial = None
+    _authority_issuer_serial = False
+    _crl_distribution_points = None
+    _delta_crl_distribution_points = None
+    _valid_domains = None
+    _valid_ips = None
+    _self_issued = None
+    _self_signed = None
+    _sha1 = None
+    _sha256 = None
+
+    def _set_extensions(self):
+        """
+        Sets common named extensions to private attributes and creates a list
+        of critical extensions
+        """
+
+        self._critical_extensions = set()
+
+        for extension in self['tbs_certificate']['extensions']:
+            name = extension['extn_id'].native
+            attribute_name = '_%s_value' % name
+            if hasattr(self, attribute_name):
+                setattr(self, attribute_name, extension['extn_value'].parsed)
+            if extension['critical'].native:
+                self._critical_extensions.add(name)
+
+        self._processed_extensions = True
+
+    @property
+    def critical_extensions(self):
+        """
+        Returns a set of the names (or OID if not a known extension) of the
+        extensions marked as critical
+
+        :return:
+            A set of unicode strings
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._critical_extensions
+
+    @property
+    def private_key_usage_period_value(self):
+        """
+        This extension is used to constrain the period over which the subject
+        private key may be used
+
+        :return:
+            None or a PrivateKeyUsagePeriod object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._private_key_usage_period_value
+
+    @property
+    def subject_directory_attributes_value(self):
+        """
+        This extension is used to contain additional identification attributes
+        about the subject.
+
+        :return:
+            None or a SubjectDirectoryAttributes object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._subject_directory_attributes_value
+
+    @property
+    def key_identifier_value(self):
+        """
+        This extension is used to help in creating certificate validation paths.
+        It contains an identifier that should generally, but is not guaranteed
+        to, be unique.
+
+        :return:
+            None or an OctetString object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._key_identifier_value
+
+    @property
+    def key_usage_value(self):
+        """
+        This extension is used to define the purpose of the public key
+        contained within the certificate.
+
+        :return:
+            None or a KeyUsage object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._key_usage_value
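+    # (Editor's note) A usage sketch for the extension properties, using a
+    # hypothetical file name:
+    #
+    #   with open('certificate.der', 'rb') as f:
+    #       cert = Certificate.load(f.read())
+    #   cert.key_usage_value.native   # e.g. set(['digital_signature'])
+    #   cert.critical_extensions      # set of critical extension names
+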
+    @property
+    def subject_alt_name_value(self):
+        """
+        This extension allows for additional names to be associated with the
+        subject of the certificate. While it may contain a whole host of
+        possible names, it is usually used to allow certificates to be used
+        with multiple different domain names.
+
+        :return:
+            None or a GeneralNames object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._subject_alt_name_value
+
+    @property
+    def issuer_alt_name_value(self):
+        """
+        This extension allows associating one or more alternative names with
+        the issuer of the certificate.
+
+        :return:
+            None or an x509.GeneralNames object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._issuer_alt_name_value
+
+    @property
+    def basic_constraints_value(self):
+        """
+        This extension is used to determine if the subject of the certificate
+        is a CA, and if so, the maximum number of intermediate CA certificates
+        that may follow it before an end-entity certificate is found.
+
+        :return:
+            None or a BasicConstraints object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._basic_constraints_value
+
+    @property
+    def name_constraints_value(self):
+        """
+        This extension is used in CA certificates, and is used to limit the
+        possible names of certificates issued.
+
+        :return:
+            None or a NameConstraints object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._name_constraints_value
+
+    @property
+    def crl_distribution_points_value(self):
+        """
+        This extension is used to help in locating the CRL for this certificate.
+
+        :return:
+            None or a CRLDistributionPoints object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._crl_distribution_points_value
+
+    @property
+    def certificate_policies_value(self):
+        """
+        This extension defines policies in CA certificates under which
+        certificates may be issued. In end-entity certificates, the inclusion
+        of a policy indicates the issuance of the certificate follows the
+        policy.
+
+        :return:
+            None or a CertificatePolicies object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._certificate_policies_value
+
+    @property
+    def policy_mappings_value(self):
+        """
+        This extension allows mapping policy OIDs to other OIDs. This is used
+        to allow different policies to be treated as equivalent in the process
+        of validation.
+
+        :return:
+            None or a PolicyMappings object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._policy_mappings_value
+
+    @property
+    def authority_key_identifier_value(self):
+        """
+        This extension helps in identifying the public key with which to
+        validate the authenticity of the certificate.
+
+        :return:
+            None or an AuthorityKeyIdentifier object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._authority_key_identifier_value
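+    # (Editor's note) A sketch of chain building with these extensions, using
+    # hypothetical child/parent certificates - the child's authority key
+    # identifier should match the key identifier of its issuer:
+    #
+    #   child.authority_key_identifier == parent.key_identifier
+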
+    @property
+    def policy_constraints_value(self):
+        """
+        This extension is used to control if policy mapping is allowed and
+        when policies are required.
+
+        :return:
+            None or a PolicyConstraints object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._policy_constraints_value
+
+    @property
+    def freshest_crl_value(self):
+        """
+        This extension is used to help locate any available delta CRLs
+
+        :return:
+            None or a CRLDistributionPoints object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._freshest_crl_value
+
+    @property
+    def inhibit_any_policy_value(self):
+        """
+        This extension is used to prevent mapping of the anyPolicy policy to
+        specific requirements
+
+        :return:
+            None or an Integer object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._inhibit_any_policy_value
+
+    @property
+    def extended_key_usage_value(self):
+        """
+        This extension is used to define additional purposes for the public key
+        beyond what is contained in the key usage extension.
+
+        :return:
+            None or an ExtKeyUsageSyntax object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._extended_key_usage_value
+
+    @property
+    def authority_information_access_value(self):
+        """
+        This extension is used to locate the CA certificate used to sign this
+        certificate, or the OCSP responder for this certificate.
+
+        :return:
+            None or an AuthorityInfoAccessSyntax object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._authority_information_access_value
+
+    @property
+    def subject_information_access_value(self):
+        """
+        This extension is used to access information about the subject of this
+        certificate.
+
+        :return:
+            None or a SubjectInfoAccessSyntax object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._subject_information_access_value
+
+    @property
+    def tls_feature_value(self):
+        """
+        This extension is used to list the TLS features a server must respond
+        with if a client initiates a request supporting them.
+
+        :return:
+            None or a Features object
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._tls_feature_value
+
+    @property
+    def ocsp_no_check_value(self):
+        """
+        This extension is used on certificates of OCSP responders, indicating
+        that revocation information for the certificate should never need to
+        be verified, thus preventing possible loops in path validation.
+
+        :return:
+            None or a Null object (if present)
+        """
+
+        if not self._processed_extensions:
+            self._set_extensions()
+        return self._ocsp_no_check_value
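+    # (Editor's note) The *_value properties above parse the matching
+    # extension on first access and cache the result via _set_extensions();
+    # the properties below expose commonly used tbs_certificate fields.
+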
+    @property
+    def signature(self):
+        """
+        :return:
+            A byte string of the signature
+        """
+
+        return self['signature_value'].native
+
+    @property
+    def signature_algo(self):
+        """
+        :return:
+            A unicode string of "rsassa_pkcs1v15", "rsassa_pss", "dsa", "ecdsa"
+        """
+
+        return self['signature_algorithm'].signature_algo
+
+    @property
+    def hash_algo(self):
+        """
+        :return:
+            A unicode string of "md2", "md5", "sha1", "sha224", "sha256",
+            "sha384", "sha512", "sha512_224", "sha512_256"
+        """
+
+        return self['signature_algorithm'].hash_algo
+
+    @property
+    def public_key(self):
+        """
+        :return:
+            The PublicKeyInfo object for this certificate
+        """
+
+        return self['tbs_certificate']['subject_public_key_info']
+
+    @property
+    def subject(self):
+        """
+        :return:
+            The Name object for the subject of this certificate
+        """
+
+        return self['tbs_certificate']['subject']
+
+    @property
+    def issuer(self):
+        """
+        :return:
+            The Name object for the issuer of this certificate
+        """
+
+        return self['tbs_certificate']['issuer']
+
+    @property
+    def serial_number(self):
+        """
+        :return:
+            An integer of the certificate's serial number
+        """
+
+        return self['tbs_certificate']['serial_number'].native
+
+    @property
+    def key_identifier(self):
+        """
+        :return:
+            None or a byte string of the certificate's key identifier from the
+            key identifier extension
+        """
+
+        if not self.key_identifier_value:
+            return None
+
+        return self.key_identifier_value.native
+
+    @property
+    def issuer_serial(self):
+        """
+        :return:
+            A byte string of the SHA-256 hash of the issuer concatenated with
+            the ascii character ":", concatenated with the serial number as
+            an ascii string
+        """
+
+        if self._issuer_serial is None:
+            self._issuer_serial = self.issuer.sha256 + b':' + str_cls(self.serial_number).encode('ascii')
+        return self._issuer_serial
+
+    @property
+    def authority_key_identifier(self):
+        """
+        :return:
+            None or a byte string of the key_identifier from the authority key
+            identifier extension
+        """
+
+        if not self.authority_key_identifier_value:
+            return None
+
+        return self.authority_key_identifier_value['key_identifier'].native
+
+    @property
+    def authority_issuer_serial(self):
+        """
+        :return:
+            None or a byte string of the SHA-256 hash of the issuer from the
+            authority key identifier extension concatenated with the ascii
+            character ":", concatenated with the serial number from the
+            authority key identifier extension as an ascii string
+        """
+
+        if self._authority_issuer_serial is False:
+            akiv = self.authority_key_identifier_value
+            if akiv and akiv['authority_cert_issuer'].native:
+                issuer = self.authority_key_identifier_value['authority_cert_issuer'][0].chosen
+                # We untag the element since it is tagged via being a choice from GeneralName
+                issuer = issuer.untag()
+                authority_serial = self.authority_key_identifier_value['authority_cert_serial_number'].native
+                self._authority_issuer_serial = issuer.sha256 + b':' + str_cls(authority_serial).encode('ascii')
+            else:
+                self._authority_issuer_serial = None
+        return self._authority_issuer_serial
+
+    @property
+    def crl_distribution_points(self):
+        """
+        Returns complete CRL URLs - does not include delta CRLs
+
+        :return:
+            A list of zero or more DistributionPoint objects
+        """
+
+        if self._crl_distribution_points is None:
+            self._crl_distribution_points = self._get_http_crl_distribution_points(self.crl_distribution_points_value)
+        return self._crl_distribution_points
self._get_http_crl_distribution_points(self.crl_distribution_points_value) + return self._crl_distribution_points + + @property + def delta_crl_distribution_points(self): + """ + Returns delta CRL URLs - does not include complete CRLs + + :return: + A list of zero or more DistributionPoint objects + """ + + if self._delta_crl_distribution_points is None: + self._delta_crl_distribution_points = self._get_http_crl_distribution_points(self.freshest_crl_value) + return self._delta_crl_distribution_points + + def _get_http_crl_distribution_points(self, crl_distribution_points): + """ + Fetches the DistributionPoint object for non-relative, HTTP CRLs + referenced by the certificate + + :param crl_distribution_points: + A CRLDistributionPoints object to grab the DistributionPoints from + + :return: + A list of zero or more DistributionPoint objects + """ + + output = [] + + if crl_distribution_points is None: + return [] + + for distribution_point in crl_distribution_points: + distribution_point_name = distribution_point['distribution_point'] + if distribution_point_name is VOID: + continue + # RFC 5280 indicates conforming CA should not use the relative form + if distribution_point_name.name == 'name_relative_to_crl_issuer': + continue + # This library is currently only concerned with HTTP-based CRLs + for general_name in distribution_point_name.chosen: + if general_name.name == 'uniform_resource_identifier': + output.append(distribution_point) + + return output + + @property + def ocsp_urls(self): + """ + :return: + A list of zero or more unicode strings of the OCSP URLs for this + cert + """ + + if not self.authority_information_access_value: + return [] + + output = [] + for entry in self.authority_information_access_value: + if entry['access_method'].native == 'ocsp': + location = entry['access_location'] + if location.name != 'uniform_resource_identifier': + continue + url = location.native + if url.lower().startswith(('http://', 'https://', 'ldap://', 'ldaps://')): + output.append(url) + return output + + @property + def valid_domains(self): + """ + :return: + A list of unicode strings of valid domain names for the certificate. + Wildcard certificates will have a domain in the form: *.example.com + """ + + if self._valid_domains is None: + self._valid_domains = [] + + # For the subject alt name extension, we can look at the name of + # the choice selected since it distinguishes between domain names, + # email addresses, IPs, etc + if self.subject_alt_name_value: + for general_name in self.subject_alt_name_value: + if general_name.name == 'dns_name' and general_name.native not in self._valid_domains: + self._valid_domains.append(general_name.native) + + # If there was no subject alt name extension, and the common name + # in the subject looks like a domain, that is considered the valid + # list. This is done because according to + # https://tools.ietf.org/html/rfc6125#section-6.4.4, the common + # name should not be used if the subject alt name is present. 
+
+            else:
+                pattern = re.compile('^(\\*\\.)?(?:[a-zA-Z0-9](?:[a-zA-Z0-9\\-]*[a-zA-Z0-9])?\\.)+[a-zA-Z]{2,}$')
+                for rdn in self.subject.chosen:
+                    for name_type_value in rdn:
+                        if name_type_value['type'].native == 'common_name':
+                            value = name_type_value['value'].native
+                            if pattern.match(value):
+                                self._valid_domains.append(value)
+
+        return self._valid_domains
+
+    @property
+    def valid_ips(self):
+        """
+        :return:
+            A list of unicode strings of valid IP addresses for the certificate
+        """
+
+        if self._valid_ips is None:
+            self._valid_ips = []
+
+            if self.subject_alt_name_value:
+                for general_name in self.subject_alt_name_value:
+                    if general_name.name == 'ip_address':
+                        self._valid_ips.append(general_name.native)
+
+        return self._valid_ips
+
+    @property
+    def ca(self):
+        """
+        :return:
+            A boolean - if the certificate is marked as a CA
+        """
+
+        return self.basic_constraints_value and self.basic_constraints_value['ca'].native
+
+    @property
+    def max_path_length(self):
+        """
+        :return:
+            None or an integer of the maximum path length
+        """
+
+        if not self.ca:
+            return None
+        return self.basic_constraints_value['path_len_constraint'].native
+
+    @property
+    def self_issued(self):
+        """
+        :return:
+            A boolean - if the certificate is self-issued, as defined by RFC
+            5280
+        """
+
+        if self._self_issued is None:
+            self._self_issued = self.subject == self.issuer
+        return self._self_issued
+
+    @property
+    def self_signed(self):
+        """
+        :return:
+            A unicode string of "no" or "maybe". The "maybe" result will
+            be returned if the certificate issuer and subject are the same.
+            If a key identifier and authority key identifier are present,
+            they will need to match otherwise "no" will be returned.
+
+            To verify whether a certificate is truly self-signed, the signature
+            will need to be verified. See the certvalidator package for
+            one possible solution.
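Note that ``self_signed`` deliberately never answers "yes": without checking the signature, issuer/subject and key identifier equality can only establish "maybe". A short sketch of triaging a certificate with the helpers above, reusing the placeholder ``cert.der`` path:

.. code-block:: python

    from asn1crypto import x509

    with open('cert.der', 'rb') as f:  # placeholder path, as before
        cert = x509.Certificate.load(f.read())

    print(cert.sha256_fingerprint)  # uppercase hex pairs of the SHA-256 of the DER
    print(cert.self_issued)         # True when subject == issuer
    print(cert.self_signed)         # 'no' or 'maybe'; 'maybe' still needs a signature check
    print(cert.ocsp_urls)           # OCSP responder URLs from the AIA extension, or []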
+ """ + + if self._self_signed is None: + self._self_signed = 'no' + if self.self_issued: + if self.key_identifier: + if not self.authority_key_identifier: + self._self_signed = 'maybe' + elif self.authority_key_identifier == self.key_identifier: + self._self_signed = 'maybe' + else: + self._self_signed = 'maybe' + return self._self_signed + + @property + def sha1(self): + """ + :return: + The SHA-1 hash of the DER-encoded bytes of this complete certificate + """ + + if self._sha1 is None: + self._sha1 = hashlib.sha1(self.dump()).digest() + return self._sha1 + + @property + def sha1_fingerprint(self): + """ + :return: + A unicode string of the SHA-1 hash, formatted using hex encoding + with a space between each pair of characters, all uppercase + """ + + return ' '.join('%02X' % c for c in bytes_to_list(self.sha1)) + + @property + def sha256(self): + """ + :return: + The SHA-256 hash of the DER-encoded bytes of this complete + certificate + """ + + if self._sha256 is None: + self._sha256 = hashlib.sha256(self.dump()).digest() + return self._sha256 + + @property + def sha256_fingerprint(self): + """ + :return: + A unicode string of the SHA-256 hash, formatted using hex encoding + with a space between each pair of characters, all uppercase + """ + + return ' '.join('%02X' % c for c in bytes_to_list(self.sha256)) + + def is_valid_domain_ip(self, domain_ip): + """ + Check if a domain name or IP address is valid according to the + certificate + + :param domain_ip: + A unicode string of a domain name or IP address + + :return: + A boolean - if the domain or IP is valid for the certificate + """ + + if not isinstance(domain_ip, str_cls): + raise TypeError(unwrap( + ''' + domain_ip must be a unicode string, not %s + ''', + type_name(domain_ip) + )) + + encoded_domain_ip = domain_ip.encode('idna').decode('ascii').lower() + + is_ipv6 = encoded_domain_ip.find(':') != -1 + is_ipv4 = not is_ipv6 and re.match('^\\d+\\.\\d+\\.\\d+\\.\\d+$', encoded_domain_ip) + is_domain = not is_ipv6 and not is_ipv4 + + # Handle domain name checks + if is_domain: + if not self.valid_domains: + return False + + domain_labels = encoded_domain_ip.split('.') + + for valid_domain in self.valid_domains: + encoded_valid_domain = valid_domain.encode('idna').decode('ascii').lower() + valid_domain_labels = encoded_valid_domain.split('.') + + # The domain must be equal in label length to match + if len(valid_domain_labels) != len(domain_labels): + continue + + if valid_domain_labels == domain_labels: + return True + + is_wildcard = self._is_wildcard_domain(encoded_valid_domain) + if is_wildcard and self._is_wildcard_match(domain_labels, valid_domain_labels): + return True + + return False + + # Handle IP address checks + if not self.valid_ips: + return False + + family = socket.AF_INET if is_ipv4 else socket.AF_INET6 + normalized_ip = inet_pton(family, encoded_domain_ip) + + for valid_ip in self.valid_ips: + valid_family = socket.AF_INET if valid_ip.find('.') != -1 else socket.AF_INET6 + normalized_valid_ip = inet_pton(valid_family, valid_ip) + + if normalized_valid_ip == normalized_ip: + return True + + return False + + def _is_wildcard_domain(self, domain): + """ + Checks if a domain is a valid wildcard according to + https://tools.ietf.org/html/rfc6125#section-6.4.3 + + :param domain: + A unicode string of the domain name, where any U-labels from an IDN + have been converted to A-labels + + :return: + A boolean - if the domain is a valid wildcard domain + """ + + # The * character must be present for a wildcard match, and if 
there is + # most than one, it is an invalid wildcard specification + if domain.count('*') != 1: + return False + + labels = domain.lower().split('.') + + if not labels: + return False + + # Wildcards may only appear in the left-most label + if labels[0].find('*') == -1: + return False + + # Wildcards may not be embedded in an A-label from an IDN + if labels[0][0:4] == 'xn--': + return False + + return True + + def _is_wildcard_match(self, domain_labels, valid_domain_labels): + """ + Determines if the labels in a domain are a match for labels from a + wildcard valid domain name + + :param domain_labels: + A list of unicode strings, with A-label form for IDNs, of the labels + in the domain name to check + + :param valid_domain_labels: + A list of unicode strings, with A-label form for IDNs, of the labels + in a wildcard domain pattern + + :return: + A boolean - if the domain matches the valid domain + """ + + first_domain_label = domain_labels[0] + other_domain_labels = domain_labels[1:] + + wildcard_label = valid_domain_labels[0] + other_valid_domain_labels = valid_domain_labels[1:] + + # The wildcard is only allowed in the first label, so if + # The subsequent labels are not equal, there is no match + if other_domain_labels != other_valid_domain_labels: + return False + + if wildcard_label == '*': + return True + + wildcard_regex = re.compile('^' + wildcard_label.replace('*', '.*') + '$') + if wildcard_regex.match(first_domain_label): + return True + + return False + + +# The structures are taken from the OpenSSL source file x_x509a.c, and specify +# extra information that is added to X.509 certificates to store trust +# information about the certificate. + +class KeyPurposeIdentifiers(SequenceOf): + _child_spec = KeyPurposeId + + +class SequenceOfAlgorithmIdentifiers(SequenceOf): + _child_spec = AlgorithmIdentifier + + +class CertificateAux(Sequence): + _fields = [ + ('trust', KeyPurposeIdentifiers, {'optional': True}), + ('reject', KeyPurposeIdentifiers, {'implicit': 0, 'optional': True}), + ('alias', UTF8String, {'optional': True}), + ('keyid', OctetString, {'optional': True}), + ('other', SequenceOfAlgorithmIdentifiers, {'implicit': 1, 'optional': True}), + ] + + +class TrustedCertificate(Concat): + _child_specs = [Certificate, CertificateAux] diff --git a/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/INSTALLER b/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/LICENSE b/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/LICENSE new file mode 100644 index 0000000..3bbadc3 --- /dev/null +++ b/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2015-2016 Markus Unterwaditzer + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
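The RFC 6125 wildcard rules enforced by ``_is_wildcard_domain`` and ``_is_wildcard_match`` above are easiest to see through the public entry point, ``is_valid_domain_ip``. A sketch, assuming a hypothetical ``wildcard.der`` certificate issued for ``*.example.com``:

.. code-block:: python

    from asn1crypto import x509

    with open('wildcard.der', 'rb') as f:  # hypothetical *.example.com cert
        cert = x509.Certificate.load(f.read())

    # One wildcard, left-most label only, equal label counts: a match
    print(cert.is_valid_domain_ip(u'www.example.com'))  # True
    # A wildcard never spans multiple labels
    print(cert.is_valid_domain_ip(u'a.b.example.com'))  # False
    # IPs are matched against ip_address entries in the subject alt name
    print(cert.is_valid_domain_ip(u'203.0.113.10'))     # depends on the cert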
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/METADATA b/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/METADATA new file mode 100644 index 0000000..ca2c892 --- /dev/null +++ b/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/METADATA @@ -0,0 +1,145 @@ +Metadata-Version: 2.1 +Name: atomicwrites +Version: 1.4.0 +Summary: Atomic file writes. +Home-page: https://github.com/untitaker/python-atomicwrites +Author: Markus Unterwaditzer +Author-email: markus@unterwaditzer.net +License: MIT +Platform: UNKNOWN +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* + +=================== +python-atomicwrites +=================== + +.. image:: https://travis-ci.org/untitaker/python-atomicwrites.svg?branch=master + :target: https://travis-ci.org/untitaker/python-atomicwrites + +.. image:: https://ci.appveyor.com/api/projects/status/vadc4le3c27to59x/branch/master?svg=true + :target: https://ci.appveyor.com/project/untitaker/python-atomicwrites/branch/master + +Atomic file writes. + +.. code-block:: python + + from atomicwrites import atomic_write + + with atomic_write('foo.txt', overwrite=True) as f: + f.write('Hello world.') + # "foo.txt" doesn't exist yet. + + # Now it does. + + +Features that distinguish it from other similar libraries (see `Alternatives and Credit`_): + +- Race-free assertion that the target file doesn't yet exist. This can be + controlled with the ``overwrite`` parameter. + +- Windows support, although not well-tested. The MSDN resources are not very + explicit about which operations are atomic. I'm basing my assumptions off `a + comment + `_ + by `Doug Crook + `_, who appears + to be a Microsoft employee: + + FAQ: Is MoveFileEx atomic + Frequently asked question: Is MoveFileEx atomic if the existing and new + files are both on the same drive? + + The simple answer is "usually, but in some cases it will silently fall-back + to a non-atomic method, so don't count on it". + + The implementation of MoveFileEx looks something like this: [...] + + The problem is if the rename fails, you might end up with a CopyFile, which + is definitely not atomic. + + If you really need atomic-or-nothing, you can try calling + NtSetInformationFile, which is unsupported but is much more likely to be + atomic. + +- Simple high-level API that wraps a very flexible class-based API. + +- Consistent error handling across platforms. + + +How it works +============ + +It uses a temporary file in the same directory as the given path. This ensures +that the temporary file resides on the same filesystem. 
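That same-directory requirement is the crux of the design: ``rename(2)`` is only atomic within a single filesystem, so a temporary file in ``/tmp`` would not do. A stripped-down, library-independent sketch of the pattern (error handling omitted; the real implementation is more careful):

.. code-block:: python

    import os
    import tempfile

    def write_atomic(path, data):
        # Create the temp file next to the target so both are on one filesystem
        directory = os.path.dirname(os.path.abspath(path))
        fd, tmp_path = tempfile.mkstemp(dir=directory)
        try:
            os.write(fd, data)
            os.fsync(fd)           # flush contents before renaming
        finally:
            os.close(fd)
        os.rename(tmp_path, path)  # atomic replacement on POSIX

    write_atomic('foo.txt', b'Hello world.\n')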
+ +The temporary file will then be atomically moved to the target location: On +POSIX, it will use ``rename`` if files should be overwritten, otherwise a +combination of ``link`` and ``unlink``. On Windows, it uses MoveFileEx_ through +stdlib's ``ctypes`` with the appropriate flags. + +Note that with ``link`` and ``unlink``, there's a timewindow where the file +might be available under two entries in the filesystem: The name of the +temporary file, and the name of the target file. + +Also note that the permissions of the target file may change this way. In some +situations a ``chmod`` can be issued without any concurrency problems, but +since that is not always the case, this library doesn't do it by itself. + +.. _MoveFileEx: https://msdn.microsoft.com/en-us/library/windows/desktop/aa365240%28v=vs.85%29.aspx + +fsync +----- + +On POSIX, ``fsync`` is invoked on the temporary file after it is written (to +flush file content and metadata), and on the parent directory after the file is +moved (to flush filename). + +``fsync`` does not take care of disks' internal buffers, but there don't seem +to be any standard POSIX APIs for that. On OS X, ``fcntl`` is used with +``F_FULLFSYNC`` instead of ``fsync`` for that reason. + +On Windows, `_commit `_ +is used, but there are no guarantees about disk internal buffers. + +Alternatives and Credit +======================= + +Atomicwrites is directly inspired by the following libraries (and shares a +minimal amount of code): + +- The Trac project's `utility functions + `_, + also used in `Werkzeug `_ and + `mitsuhiko/python-atomicfile + `_. The idea to use + ``ctypes`` instead of ``PyWin32`` originated there. + +- `abarnert/fatomic `_. Windows support + (based on ``PyWin32``) was originally taken from there. + +Other alternatives to atomicwrites include: + +- `sashka/atomicfile `_. Originally I + considered using that, but at the time it was lacking a lot of features I + needed (Windows support, overwrite-parameter, overriding behavior through + subclassing). + +- The `Boltons library collection `_ + features a class for atomic file writes, which seems to have a very similar + ``overwrite`` parameter. It is lacking Windows support though. + +License +======= + +Licensed under the MIT, see ``LICENSE``. 
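One detail from the fsync section above that is easy to miss: after the temporary file is renamed, the directory itself must be fsynced, otherwise a crash can still lose the new directory entry. A minimal POSIX-only sketch of that step; the ``/data`` path is hypothetical:

.. code-block:: python

    import os

    def fsync_directory(directory):
        # Flushing the directory makes the renamed entry itself durable
        fd = os.open(directory, os.O_RDONLY)
        try:
            os.fsync(fd)
        finally:
            os.close(fd)

    # For example, after os.rename(tmp_path, '/data/out.json'):
    fsync_directory('/data')

This mirrors what the library's ``_sync_directory`` helper in ``atomicwrites/__init__.py`` below does after every commit.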
+ + diff --git a/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/RECORD b/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/RECORD new file mode 100644 index 0000000..4b49946 --- /dev/null +++ b/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/RECORD @@ -0,0 +1,9 @@ +atomicwrites-1.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +atomicwrites-1.4.0.dist-info/LICENSE,sha256=h4Mp8L2HitAVEpzovagvSB6G7C6Agx6QnA1nFx2SLnM,1069 +atomicwrites-1.4.0.dist-info/METADATA,sha256=C0889LUauSNbRgzOwLjcI-RFU-Q7ICAvPPxSk_pFN4Q,5585 +atomicwrites-1.4.0.dist-info/RECORD,, +atomicwrites-1.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +atomicwrites-1.4.0.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +atomicwrites-1.4.0.dist-info/top_level.txt,sha256=ks64zKVUkrl2ZrrP046CsytXlSGf8gLG-IcoXpNyeoc,13 +atomicwrites/__init__.py,sha256=N_LFjMO0nQ9NXMyGQTod3my4OodSCX-FUshHUThV2_4,6794 +atomicwrites/__init__.pyc,, diff --git a/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/REQUESTED b/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/WHEEL b/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/WHEEL new file mode 100644 index 0000000..ef99c6c --- /dev/null +++ b/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/top_level.txt b/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/top_level.txt new file mode 100644 index 0000000..5fa5a87 --- /dev/null +++ b/venv/lib/python2.7/site-packages/atomicwrites-1.4.0.dist-info/top_level.txt @@ -0,0 +1 @@ +atomicwrites diff --git a/venv/lib/python2.7/site-packages/atomicwrites/__init__.py b/venv/lib/python2.7/site-packages/atomicwrites/__init__.py new file mode 100644 index 0000000..623826e --- /dev/null +++ b/venv/lib/python2.7/site-packages/atomicwrites/__init__.py @@ -0,0 +1,226 @@ +import contextlib +import io +import os +import sys +import tempfile + +try: + import fcntl +except ImportError: + fcntl = None + +# `fspath` was added in Python 3.6 +try: + from os import fspath +except ImportError: + fspath = None + +__version__ = '1.4.0' + + +PY2 = sys.version_info[0] == 2 + +text_type = unicode if PY2 else str # noqa + + +def _path_to_unicode(x): + if not isinstance(x, text_type): + return x.decode(sys.getfilesystemencoding()) + return x + + +DEFAULT_MODE = "wb" if PY2 else "w" + + +_proper_fsync = os.fsync + + +if sys.platform != 'win32': + if hasattr(fcntl, 'F_FULLFSYNC'): + def _proper_fsync(fd): + # https://lists.apple.com/archives/darwin-dev/2005/Feb/msg00072.html + # https://developer.apple.com/library/mac/documentation/Darwin/Reference/ManPages/man2/fsync.2.html + # https://github.com/untitaker/python-atomicwrites/issues/6 + fcntl.fcntl(fd, fcntl.F_FULLFSYNC) + + def _sync_directory(directory): + # Ensure that filenames are written to disk + fd = os.open(directory, 0) + try: + _proper_fsync(fd) + finally: + os.close(fd) + + def _replace_atomic(src, dst): + os.rename(src, dst) + _sync_directory(os.path.normpath(os.path.dirname(dst))) + + def _move_atomic(src, dst): + os.link(src, dst) + os.unlink(src) + + src_dir = os.path.normpath(os.path.dirname(src)) + 
dst_dir = os.path.normpath(os.path.dirname(dst)) + _sync_directory(dst_dir) + if src_dir != dst_dir: + _sync_directory(src_dir) +else: + from ctypes import windll, WinError + + _MOVEFILE_REPLACE_EXISTING = 0x1 + _MOVEFILE_WRITE_THROUGH = 0x8 + _windows_default_flags = _MOVEFILE_WRITE_THROUGH + + def _handle_errors(rv): + if not rv: + raise WinError() + + def _replace_atomic(src, dst): + _handle_errors(windll.kernel32.MoveFileExW( + _path_to_unicode(src), _path_to_unicode(dst), + _windows_default_flags | _MOVEFILE_REPLACE_EXISTING + )) + + def _move_atomic(src, dst): + _handle_errors(windll.kernel32.MoveFileExW( + _path_to_unicode(src), _path_to_unicode(dst), + _windows_default_flags + )) + + +def replace_atomic(src, dst): + ''' + Move ``src`` to ``dst``. If ``dst`` exists, it will be silently + overwritten. + + Both paths must reside on the same filesystem for the operation to be + atomic. + ''' + return _replace_atomic(src, dst) + + +def move_atomic(src, dst): + ''' + Move ``src`` to ``dst``. There might a timewindow where both filesystem + entries exist. If ``dst`` already exists, :py:exc:`FileExistsError` will be + raised. + + Both paths must reside on the same filesystem for the operation to be + atomic. + ''' + return _move_atomic(src, dst) + + +class AtomicWriter(object): + ''' + A helper class for performing atomic writes. Usage:: + + with AtomicWriter(path).open() as f: + f.write(...) + + :param path: The destination filepath. May or may not exist. + :param mode: The filemode for the temporary file. This defaults to `wb` in + Python 2 and `w` in Python 3. + :param overwrite: If set to false, an error is raised if ``path`` exists. + Errors are only raised after the file has been written to. Either way, + the operation is atomic. + + If you need further control over the exact behavior, you are encouraged to + subclass. + ''' + + def __init__(self, path, mode=DEFAULT_MODE, overwrite=False, + **open_kwargs): + if 'a' in mode: + raise ValueError( + 'Appending to an existing file is not supported, because that ' + 'would involve an expensive `copy`-operation to a temporary ' + 'file. Open the file in normal `w`-mode and copy explicitly ' + 'if that\'s what you\'re after.' + ) + if 'x' in mode: + raise ValueError('Use the `overwrite`-parameter instead.') + if 'w' not in mode: + raise ValueError('AtomicWriters can only be written to.') + + # Attempt to convert `path` to `str` or `bytes` + if fspath is not None: + path = fspath(path) + + self._path = path + self._mode = mode + self._overwrite = overwrite + self._open_kwargs = open_kwargs + + def open(self): + ''' + Open the temporary file. + ''' + return self._open(self.get_fileobject) + + @contextlib.contextmanager + def _open(self, get_fileobject): + f = None # make sure f exists even if get_fileobject() fails + try: + success = False + with get_fileobject(**self._open_kwargs) as f: + yield f + self.sync(f) + self.commit(f) + success = True + finally: + if not success: + try: + self.rollback(f) + except Exception: + pass + + def get_fileobject(self, suffix="", prefix=tempfile.gettempprefix(), + dir=None, **kwargs): + '''Return the temporary file to use.''' + if dir is None: + dir = os.path.normpath(os.path.dirname(self._path)) + descriptor, name = tempfile.mkstemp(suffix=suffix, prefix=prefix, + dir=dir) + # io.open() will take either the descriptor or the name, but we need + # the name later for commit()/replace_atomic() and couldn't find a way + # to get the filename from the descriptor. 
+ os.close(descriptor) + kwargs['mode'] = self._mode + kwargs['file'] = name + return io.open(**kwargs) + + def sync(self, f): + '''responsible for clearing as many file caches as possible before + commit''' + f.flush() + _proper_fsync(f.fileno()) + + def commit(self, f): + '''Move the temporary file to the target location.''' + if self._overwrite: + replace_atomic(f.name, self._path) + else: + move_atomic(f.name, self._path) + + def rollback(self, f): + '''Clean up all temporary resources.''' + os.unlink(f.name) + + +def atomic_write(path, writer_cls=AtomicWriter, **cls_kwargs): + ''' + Simple atomic writes. This wraps :py:class:`AtomicWriter`:: + + with atomic_write(path) as f: + f.write(...) + + :param path: The target path to write to. + :param writer_cls: The writer class to use. This parameter is useful if you + subclassed :py:class:`AtomicWriter` to change some behavior and want to + use that new subclass. + + Additional keyword arguments are passed to the writer class. See + :py:class:`AtomicWriter`. + ''' + return writer_cls(path, **cls_kwargs).open() diff --git a/venv/lib/python2.7/site-packages/attr/__init__.py b/venv/lib/python2.7/site-packages/attr/__init__.py new file mode 100644 index 0000000..7a79e57 --- /dev/null +++ b/venv/lib/python2.7/site-packages/attr/__init__.py @@ -0,0 +1,76 @@ +from __future__ import absolute_import, division, print_function + +import sys + +from functools import partial + +from . import converters, exceptions, filters, setters, validators +from ._config import get_run_validators, set_run_validators +from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types +from ._make import ( + NOTHING, + Attribute, + Factory, + attrib, + attrs, + fields, + fields_dict, + make_class, + validate, +) +from ._version_info import VersionInfo + + +__version__ = "20.2.0" +__version_info__ = VersionInfo._from_version_string(__version__) + +__title__ = "attrs" +__description__ = "Classes Without Boilerplate" +__url__ = "https://www.attrs.org/" +__uri__ = __url__ +__doc__ = __description__ + " <" + __uri__ + ">" + +__author__ = "Hynek Schlawack" +__email__ = "hs@ox.cx" + +__license__ = "MIT" +__copyright__ = "Copyright (c) 2015 Hynek Schlawack" + + +s = attributes = attrs +ib = attr = attrib +dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) + +__all__ = [ + "Attribute", + "Factory", + "NOTHING", + "asdict", + "assoc", + "astuple", + "attr", + "attrib", + "attributes", + "attrs", + "converters", + "evolve", + "exceptions", + "fields", + "fields_dict", + "filters", + "get_run_validators", + "has", + "ib", + "make_class", + "resolve_types", + "s", + "set_run_validators", + "setters", + "validate", + "validators", +] + +if sys.version_info[:2] >= (3, 6): + from ._next_gen import define, field, frozen, mutable + + __all__.extend((define, field, frozen, mutable)) diff --git a/venv/lib/python2.7/site-packages/attr/__init__.pyi b/venv/lib/python2.7/site-packages/attr/__init__.pyi new file mode 100644 index 0000000..0869914 --- /dev/null +++ b/venv/lib/python2.7/site-packages/attr/__init__.pyi @@ -0,0 +1,423 @@ +from typing import ( + Any, + Callable, + Dict, + Generic, + List, + Optional, + Sequence, + Mapping, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +# `import X as X` is required to make these public +from . import exceptions as exceptions +from . import filters as filters +from . import converters as converters +from . import setters as setters +from . 
import validators as validators + +from ._version_info import VersionInfo + +__version__: str +__version_info__: VersionInfo +__title__: str +__description__: str +__url__: str +__uri__: str +__author__: str +__email__: str +__license__: str +__copyright__: str + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_ValidatorType = Callable[[Any, Attribute[_T], _T], Any] +_ConverterType = Callable[[Any], Any] +_FilterType = Callable[[Attribute[_T], _T], bool] +_ReprType = Callable[[Any], str] +_ReprArgType = Union[bool, _ReprType] +_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any] +_OnSetAttrArgType = Union[ + _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType +] +# FIXME: in reality, if multiple validators are passed they must be in a list +# or tuple, but those are invariant and so would prevent subtypes of +# _ValidatorType from working when passed in a list or tuple. +_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] + +# _make -- + +NOTHING: object + +# NOTE: Factory lies about its return type to make this possible: +# `x: List[int] # = Factory(list)` +# Work around mypy issue #4554 in the common case by using an overload. +@overload +def Factory(factory: Callable[[], _T]) -> _T: ... +@overload +def Factory( + factory: Union[Callable[[Any], _T], Callable[[], _T]], + takes_self: bool = ..., +) -> _T: ... + +class Attribute(Generic[_T]): + name: str + default: Optional[_T] + validator: Optional[_ValidatorType[_T]] + repr: _ReprArgType + cmp: bool + eq: bool + order: bool + hash: Optional[bool] + init: bool + converter: Optional[_ConverterType] + metadata: Dict[Any, Any] + type: Optional[Type[_T]] + kw_only: bool + on_setattr: _OnSetAttrType + +# NOTE: We had several choices for the annotation to use for type arg: +# 1) Type[_T] +# - Pros: Handles simple cases correctly +# - Cons: Might produce less informative errors in the case of conflicting +# TypeVars e.g. `attr.ib(default='bad', type=int)` +# 2) Callable[..., _T] +# - Pros: Better error messages than #1 for conflicting TypeVars +# - Cons: Terrible error messages for validator checks. +# e.g. attr.ib(type=int, validator=validate_str) +# -> error: Cannot infer function type argument +# 3) type (and do all of the work in the mypy plugin) +# - Pros: Simple here, and we could customize the plugin with our own errors. +# - Cons: Would need to write mypy plugin code to handle all the cases. +# We chose option #1. + +# `attr` lies about its return type to make the following possible: +# attr() -> Any +# attr(8) -> int +# attr(validator=) -> Whatever the callable expects. +# This makes this type of assignments possible: +# x: int = attr(8) +# +# This form catches explicit None or no default but with no other arguments +# returns Any. +@overload +def attrib( + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + cmp: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. 
+@overload +def attrib( + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def attrib( + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def attrib( + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: object = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... +@overload +def field( + *, + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def field( + *, + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def field( + *, + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. 
forward references (str), Any +@overload +def field( + *, + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... +@overload +def attrs( + maybe_cls: _C, + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _C: ... +@overload +def attrs( + maybe_cls: None = ..., + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Callable[[_C], _C]: ... +@overload +def define( + maybe_cls: _C, + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _C: ... +@overload +def define( + maybe_cls: None = ..., + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Callable[[_C], _C]: ... + +mutable = define +frozen = define # they differ only in their defaults + +# TODO: add support for returning NamedTuple from the mypy plugin +class _Fields(Tuple[Attribute[Any], ...]): + def __getattr__(self, name: str) -> Attribute[Any]: ... + +def fields(cls: type) -> _Fields: ... +def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ... +def validate(inst: Any) -> None: ... +def resolve_types( + cls: _C, + globalns: Optional[Dict[str, Any]] = ..., + localns: Optional[Dict[str, Any]] = ..., +) -> _C: ... + +# TODO: add support for returning a proper attrs class from the mypy plugin +# we use Any instead of _CountingAttr so that e.g. 
`make_class('Foo', +# [attr.ib()])` is valid +def make_class( + name: str, + attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], + bases: Tuple[type, ...] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> type: ... + +# _funcs -- + +# TODO: add support for returning TypedDict from the mypy plugin +# FIXME: asdict/astuple do not honor their factory args. Waiting on one of +# these: +# https://github.com/python/mypy/issues/4236 +# https://github.com/python/typing/issues/253 +def asdict( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... +def has(cls: type) -> bool: ... +def assoc(inst: _T, **changes: Any) -> _T: ... +def evolve(inst: _T, **changes: Any) -> _T: ... + +# _config -- + +def set_run_validators(run: bool) -> None: ... +def get_run_validators() -> bool: ... + +# aliases -- + +s = attributes = attrs +ib = attr = attrib +dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/venv/lib/python2.7/site-packages/attr/_compat.py b/venv/lib/python2.7/site-packages/attr/_compat.py new file mode 100644 index 0000000..bed5b13 --- /dev/null +++ b/venv/lib/python2.7/site-packages/attr/_compat.py @@ -0,0 +1,231 @@ +from __future__ import absolute_import, division, print_function + +import platform +import sys +import types +import warnings + + +PY2 = sys.version_info[0] == 2 +PYPY = platform.python_implementation() == "PyPy" + + +if PYPY or sys.version_info[:2] >= (3, 6): + ordered_dict = dict +else: + from collections import OrderedDict + + ordered_dict = OrderedDict + + +if PY2: + from collections import Mapping, Sequence + + from UserDict import IterableUserDict + + # We 'bundle' isclass instead of using inspect as importing inspect is + # fairly expensive (order of 10-15 ms for a modern machine in 2016) + def isclass(klass): + return isinstance(klass, (type, types.ClassType)) + + # TYPE is used in exceptions, repr(int) is different on Python 2 and 3. + TYPE = "type" + + def iteritems(d): + return d.iteritems() + + # Python 2 is bereft of a read-only dict proxy, so we make one! + class ReadOnlyDict(IterableUserDict): + """ + Best-effort read-only dict wrapper. + """ + + def __setitem__(self, key, val): + # We gently pretend we're a Python 3 mappingproxy. + raise TypeError( + "'mappingproxy' object does not support item assignment" + ) + + def update(self, _): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'update'" + ) + + def __delitem__(self, _): + # We gently pretend we're a Python 3 mappingproxy. + raise TypeError( + "'mappingproxy' object does not support item deletion" + ) + + def clear(self): + # We gently pretend we're a Python 3 mappingproxy. 
+ raise AttributeError( + "'mappingproxy' object has no attribute 'clear'" + ) + + def pop(self, key, default=None): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'pop'" + ) + + def popitem(self): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'popitem'" + ) + + def setdefault(self, key, default=None): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'setdefault'" + ) + + def __repr__(self): + # Override to be identical to the Python 3 version. + return "mappingproxy(" + repr(self.data) + ")" + + def metadata_proxy(d): + res = ReadOnlyDict() + res.data.update(d) # We blocked update, so we have to do it like this. + return res + + def just_warn(*args, **kw): # pragma: nocover + """ + We only warn on Python 3 because we are not aware of any concrete + consequences of not setting the cell on Python 2. + """ + + +else: # Python 3 and later. + from collections.abc import Mapping, Sequence # noqa + + def just_warn(*args, **kw): + """ + We only warn on Python 3 because we are not aware of any concrete + consequences of not setting the cell on Python 2. + """ + warnings.warn( + "Running interpreter doesn't sufficiently support code object " + "introspection. Some features like bare super() or accessing " + "__class__ will not work with slotted classes.", + RuntimeWarning, + stacklevel=2, + ) + + def isclass(klass): + return isinstance(klass, type) + + TYPE = "class" + + def iteritems(d): + return d.items() + + def metadata_proxy(d): + return types.MappingProxyType(dict(d)) + + +def make_set_closure_cell(): + """Return a function of two arguments (cell, value) which sets + the value stored in the closure cell `cell` to `value`. + """ + # pypy makes this easy. (It also supports the logic below, but + # why not do the easy/fast thing?) + if PYPY: # pragma: no cover + + def set_closure_cell(cell, value): + cell.__setstate__((value,)) + + return set_closure_cell + + # Otherwise gotta do it the hard way. + + # Create a function that will set its first cellvar to `value`. + def set_first_cellvar_to(value): + x = value + return + + # This function will be eliminated as dead code, but + # not before its reference to `x` forces `x` to be + # represented as a closure cell rather than a local. + def force_x_to_be_a_cell(): # pragma: no cover + return x + + try: + # Extract the code object and make sure our assumptions about + # the closure behavior are correct. + if PY2: + co = set_first_cellvar_to.func_code + else: + co = set_first_cellvar_to.__code__ + if co.co_cellvars != ("x",) or co.co_freevars != (): + raise AssertionError # pragma: no cover + + # Convert this code object to a code object that sets the + # function's first _freevar_ (not cellvar) to the argument. + if sys.version_info >= (3, 8): + # CPython 3.8+ has an incompatible CodeType signature + # (added a posonlyargcount argument) but also added + # CodeType.replace() to do this without counting parameters. 
+ set_first_freevar_code = co.replace( + co_cellvars=co.co_freevars, co_freevars=co.co_cellvars + ) + else: + args = [co.co_argcount] + if not PY2: + args.append(co.co_kwonlyargcount) + args.extend( + [ + co.co_nlocals, + co.co_stacksize, + co.co_flags, + co.co_code, + co.co_consts, + co.co_names, + co.co_varnames, + co.co_filename, + co.co_name, + co.co_firstlineno, + co.co_lnotab, + # These two arguments are reversed: + co.co_cellvars, + co.co_freevars, + ] + ) + set_first_freevar_code = types.CodeType(*args) + + def set_closure_cell(cell, value): + # Create a function using the set_first_freevar_code, + # whose first closure cell is `cell`. Calling it will + # change the value of that cell. + setter = types.FunctionType( + set_first_freevar_code, {}, "setter", (), (cell,) + ) + # And call it to set the cell. + setter(value) + + # Make sure it works on this interpreter: + def make_func_with_cell(): + x = None + + def func(): + return x # pragma: no cover + + return func + + if PY2: + cell = make_func_with_cell().func_closure[0] + else: + cell = make_func_with_cell().__closure__[0] + set_closure_cell(cell, 100) + if cell.cell_contents != 100: + raise AssertionError # pragma: no cover + + except Exception: + return just_warn + else: + return set_closure_cell + + +set_closure_cell = make_set_closure_cell() diff --git a/venv/lib/python2.7/site-packages/attr/_config.py b/venv/lib/python2.7/site-packages/attr/_config.py new file mode 100644 index 0000000..8ec9209 --- /dev/null +++ b/venv/lib/python2.7/site-packages/attr/_config.py @@ -0,0 +1,23 @@ +from __future__ import absolute_import, division, print_function + + +__all__ = ["set_run_validators", "get_run_validators"] + +_run_validators = True + + +def set_run_validators(run): + """ + Set whether or not validators are run. By default, they are run. + """ + if not isinstance(run, bool): + raise TypeError("'run' must be bool.") + global _run_validators + _run_validators = run + + +def get_run_validators(): + """ + Return whether or not validators are run. + """ + return _run_validators diff --git a/venv/lib/python2.7/site-packages/attr/_funcs.py b/venv/lib/python2.7/site-packages/attr/_funcs.py new file mode 100644 index 0000000..ca92f9f --- /dev/null +++ b/venv/lib/python2.7/site-packages/attr/_funcs.py @@ -0,0 +1,338 @@ +from __future__ import absolute_import, division, print_function + +import copy + +from ._compat import iteritems +from ._make import NOTHING, _obj_setattr, fields +from .exceptions import AttrsAttributeNotFoundError + + +def asdict( + inst, + recurse=True, + filter=None, + dict_factory=dict, + retain_collection_types=False, +): + """ + Return the ``attrs`` attribute values of *inst* as a dict. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attr.Attribute` as the first argument and the + value as the second argument. + :param callable dict_factory: A callable to produce dictionaries from. For + example, to produce ordered dictionaries instead of normal Python + dictionaries, pass in ``collections.OrderedDict``. + :param bool retain_collection_types: Do not convert to ``list`` when + encountering an attribute whose type is ``tuple`` or ``set``. Only + meaningful if ``recurse`` is ``True``. 
+ + :rtype: return type of *dict_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 16.0.0 *dict_factory* + .. versionadded:: 16.1.0 *retain_collection_types* + """ + attrs = fields(inst.__class__) + rv = dict_factory() + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + if recurse is True: + if has(v.__class__): + rv[a.name] = asdict( + v, True, filter, dict_factory, retain_collection_types + ) + elif isinstance(v, (tuple, list, set)): + cf = v.__class__ if retain_collection_types is True else list + rv[a.name] = cf( + [ + _asdict_anything( + i, filter, dict_factory, retain_collection_types + ) + for i in v + ] + ) + elif isinstance(v, dict): + df = dict_factory + rv[a.name] = df( + ( + _asdict_anything( + kk, filter, df, retain_collection_types + ), + _asdict_anything( + vv, filter, df, retain_collection_types + ), + ) + for kk, vv in iteritems(v) + ) + else: + rv[a.name] = v + else: + rv[a.name] = v + return rv + + +def _asdict_anything(val, filter, dict_factory, retain_collection_types): + """ + ``asdict`` only works on attrs instances, this works on anything. + """ + if getattr(val.__class__, "__attrs_attrs__", None) is not None: + # Attrs class. + rv = asdict(val, True, filter, dict_factory, retain_collection_types) + elif isinstance(val, (tuple, list, set)): + cf = val.__class__ if retain_collection_types is True else list + rv = cf( + [ + _asdict_anything( + i, filter, dict_factory, retain_collection_types + ) + for i in val + ] + ) + elif isinstance(val, dict): + df = dict_factory + rv = df( + ( + _asdict_anything(kk, filter, df, retain_collection_types), + _asdict_anything(vv, filter, df, retain_collection_types), + ) + for kk, vv in iteritems(val) + ) + else: + rv = val + return rv + + +def astuple( + inst, + recurse=True, + filter=None, + tuple_factory=tuple, + retain_collection_types=False, +): + """ + Return the ``attrs`` attribute values of *inst* as a tuple. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attr.Attribute` as the first argument and the + value as the second argument. + :param callable tuple_factory: A callable to produce tuples from. For + example, to produce lists instead of tuples. + :param bool retain_collection_types: Do not convert to ``list`` + or ``dict`` when encountering an attribute which type is + ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is + ``True``. + + :rtype: return type of *tuple_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 16.2.0 + """ + attrs = fields(inst.__class__) + rv = [] + retain = retain_collection_types # Very long. 
:/ + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + if recurse is True: + if has(v.__class__): + rv.append( + astuple( + v, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + ) + elif isinstance(v, (tuple, list, set)): + cf = v.__class__ if retain is True else list + rv.append( + cf( + [ + astuple( + j, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(j.__class__) + else j + for j in v + ] + ) + ) + elif isinstance(v, dict): + df = v.__class__ if retain is True else dict + rv.append( + df( + ( + astuple( + kk, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(kk.__class__) + else kk, + astuple( + vv, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(vv.__class__) + else vv, + ) + for kk, vv in iteritems(v) + ) + ) + else: + rv.append(v) + else: + rv.append(v) + return rv if tuple_factory is list else tuple_factory(rv) + + +def has(cls): + """ + Check whether *cls* is a class with ``attrs`` attributes. + + :param type cls: Class to introspect. + :raise TypeError: If *cls* is not a class. + + :rtype: bool + """ + return getattr(cls, "__attrs_attrs__", None) is not None + + +def assoc(inst, **changes): + """ + Copy *inst* and apply *changes*. + + :param inst: Instance of a class with ``attrs`` attributes. + :param changes: Keyword changes in the new copy. + + :return: A copy of inst with *changes* incorporated. + + :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't + be found on *cls*. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. deprecated:: 17.1.0 + Use `evolve` instead. + """ + import warnings + + warnings.warn( + "assoc is deprecated and will be removed after 2018/01.", + DeprecationWarning, + stacklevel=2, + ) + new = copy.copy(inst) + attrs = fields(inst.__class__) + for k, v in iteritems(changes): + a = getattr(attrs, k, NOTHING) + if a is NOTHING: + raise AttrsAttributeNotFoundError( + "{k} is not an attrs attribute on {cl}.".format( + k=k, cl=new.__class__ + ) + ) + _obj_setattr(new, k, v) + return new + + +def evolve(inst, **changes): + """ + Create a new instance, based on *inst* with *changes* applied. + + :param inst: Instance of a class with ``attrs`` attributes. + :param changes: Keyword changes in the new copy. + + :return: A copy of inst with *changes* incorporated. + + :raise TypeError: If *attr_name* couldn't be found in the class + ``__init__``. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 17.1.0 + """ + cls = inst.__class__ + attrs = fields(cls) + for a in attrs: + if not a.init: + continue + attr_name = a.name # To deal with private attributes. + init_name = attr_name if attr_name[0] != "_" else attr_name[1:] + if init_name not in changes: + changes[init_name] = getattr(inst, attr_name) + + return cls(**changes) + + +def resolve_types(cls, globalns=None, localns=None): + """ + Resolve any strings and forward annotations in type annotations. + + This is only required if you need concrete types in `Attribute`'s *type* + field. In other words, you don't need to resolve your types if you only + use them for static type checking. + + With no arguments, names will be looked up in the module in which the class + was created. If this is not what you want, e.g. 
if the name only exists + inside a method, you may pass *globalns* or *localns* to specify other + dictionaries in which to look up these names. See the docs of + `typing.get_type_hints` for more details. + + :param type cls: Class to resolve. + :param Optional[dict] globalns: Dictionary containing global variables. + :param Optional[dict] localns: Dictionary containing local variables. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + :raise NameError: If types cannot be resolved because of missing variables. + + :returns: *cls* so you can use this function also as a class decorator. + Please note that you have to apply it **after** `attr.s`. That means + the decorator has to come in the line **before** `attr.s`. + + .. versionadded:: 20.1.0 + """ + try: + # Since calling get_type_hints is expensive we cache whether we've + # done it already. + cls.__attrs_types_resolved__ + except AttributeError: + import typing + + hints = typing.get_type_hints(cls, globalns=globalns, localns=localns) + for field in fields(cls): + if field.name in hints: + # Since fields have been frozen we must work around it. + _obj_setattr(field, "type", hints[field.name]) + cls.__attrs_types_resolved__ = True + + # Return the class so you can use it as a decorator too. + return cls diff --git a/venv/lib/python2.7/site-packages/attr/_make.py b/venv/lib/python2.7/site-packages/attr/_make.py new file mode 100644 index 0000000..0fbbd7c --- /dev/null +++ b/venv/lib/python2.7/site-packages/attr/_make.py @@ -0,0 +1,2653 @@ +from __future__ import absolute_import, division, print_function + +import copy +import linecache +import sys +import threading +import uuid +import warnings + +from operator import itemgetter + +from . import _config, setters +from ._compat import ( + PY2, + isclass, + iteritems, + metadata_proxy, + ordered_dict, + set_closure_cell, +) +from .exceptions import ( + DefaultAlreadySetError, + FrozenInstanceError, + NotAnAttrsClassError, + PythonTooOldError, + UnannotatedAttributeError, +) + + +# This is used at least twice, so cache it here. +_obj_setattr = object.__setattr__ +_init_converter_pat = "__attr_converter_%s" +_init_factory_pat = "__attr_factory_{}" +_tuple_property_pat = ( + " {attr_name} = _attrs_property(_attrs_itemgetter({index}))" +) +_classvar_prefixes = ("typing.ClassVar", "t.ClassVar", "ClassVar") +# we don't use a double-underscore prefix because that triggers +# name mangling when trying to create a slot for the field +# (when slots=True) +_hash_cache_field = "_attrs_cached_hash" + +_empty_metadata_singleton = metadata_proxy({}) + +# Unique object for unequivocal getattr() defaults. +_sentinel = object() + + +class _Nothing(object): + """ + Sentinel class to indicate the lack of a value when ``None`` is ambiguous. + + ``_Nothing`` is a singleton. There is only ever one of it. + """ + + _singleton = None + + def __new__(cls): + if _Nothing._singleton is None: + _Nothing._singleton = super(_Nothing, cls).__new__(cls) + return _Nothing._singleton + + def __repr__(self): + return "NOTHING" + + +NOTHING = _Nothing() +""" +Sentinel to indicate the lack of a value when ``None`` is ambiguous. +""" + + +class _CacheHashWrapper(int): + """ + An integer subclass that pickles / copies as None + + This is used for non-slots classes with ``cache_hash=True``, to avoid + serializing a potentially (even likely) invalid hash value. 
Since ``None`` + is the default value for uncalculated hashes, whenever this is copied, + the copy's value for the hash should automatically reset. + + See GH #613 for more details. + """ + + if PY2: + # For some reason `type(None)` isn't callable in Python 2, but we don't + # actually need a constructor for None objects, we just need any + # available function that returns None. + def __reduce__(self, _none_constructor=getattr, _args=(0, "", None)): + return _none_constructor, _args + + else: + + def __reduce__(self, _none_constructor=type(None), _args=()): + return _none_constructor, _args + + +def attrib( + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, +): + """ + Create a new attribute on a class. + + .. warning:: + + Does *not* do anything unless the class is also decorated with + `attr.s`! + + :param default: A value that is used if an ``attrs``-generated ``__init__`` + is used and no value is passed while instantiating or the attribute is + excluded using ``init=False``. + + If the value is an instance of `Factory`, its callable will be + used to construct a new value (useful for mutable data types like lists + or dicts). + + If a default is not set (or set manually to `attr.NOTHING`), a value + *must* be supplied when instantiating; otherwise a `TypeError` + will be raised. + + The default can also be set using decorator notation as shown below. + + :type default: Any value + + :param callable factory: Syntactic sugar for + ``default=attr.Factory(factory)``. + + :param validator: `callable` that is called by ``attrs``-generated + ``__init__`` methods after the instance has been initialized. They + receive the initialized instance, the `Attribute`, and the + passed value. + + The return value is *not* inspected so the validator has to throw an + exception itself. + + If a `list` is passed, its items are treated as validators and must + all pass. + + Validators can be globally disabled and re-enabled using + `get_run_validators`. + + The validator can also be set using decorator notation as shown below. + + :type validator: `callable` or a `list` of `callable`\\ s. + + :param repr: Include this attribute in the generated ``__repr__`` + method. If ``True``, include the attribute; if ``False``, omit it. By + default, the built-in ``repr()`` function is used. To override how the + attribute value is formatted, pass a ``callable`` that takes a single + value and returns a string. Note that the resulting string is used + as-is, i.e. it will be used directly *instead* of calling ``repr()`` + (the default). + :type repr: a `bool` or a `callable` to use a custom function. + :param bool eq: If ``True`` (default), include this attribute in the + generated ``__eq__`` and ``__ne__`` methods that check two instances + for equality. + :param bool order: If ``True`` (default), include this attributes in the + generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. + :param bool cmp: Setting to ``True`` is equivalent to setting ``eq=True, + order=True``. Deprecated in favor of *eq* and *order*. + :param Optional[bool] hash: Include this attribute in the generated + ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This + is the correct behavior according the Python spec. Setting this value + to anything else than ``None`` is *discouraged*. 
+ :param bool init: Include this attribute in the generated ``__init__`` + method. It is possible to set this to ``False`` and set a default + value. In that case this attributed is unconditionally initialized + with the specified default value or factory. + :param callable converter: `callable` that is called by + ``attrs``-generated ``__init__`` methods to convert attribute's value + to the desired format. It is given the passed-in value, and the + returned value will be used as the new value of the attribute. The + value is converted before being passed to the validator, if any. + :param metadata: An arbitrary mapping, to be used by third-party + components. See `extending_metadata`. + :param type: The type of the attribute. In Python 3.6 or greater, the + preferred method to specify the type is using a variable annotation + (see `PEP 526 `_). + This argument is provided for backward compatibility. + Regardless of the approach used, the type will be stored on + ``Attribute.type``. + + Please note that ``attrs`` doesn't do anything with this metadata by + itself. You can use it as part of your own code or for + `static type checking `. + :param kw_only: Make this attribute keyword-only (Python 3+) + in the generated ``__init__`` (if ``init`` is ``False``, this + parameter is ignored). + :param on_setattr: Allows to overwrite the *on_setattr* setting from + `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used. + Set to `attr.setters.NO_OP` to run **no** `setattr` hooks for this + attribute -- regardless of the setting in `attr.s`. + :type on_setattr: `callable`, or a list of callables, or `None`, or + `attr.setters.NO_OP` + + .. versionadded:: 15.2.0 *convert* + .. versionadded:: 16.3.0 *metadata* + .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. + .. versionchanged:: 17.1.0 + *hash* is ``None`` and therefore mirrors *eq* by default. + .. versionadded:: 17.3.0 *type* + .. deprecated:: 17.4.0 *convert* + .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated + *convert* to achieve consistency with other noun-based arguments. + .. versionadded:: 18.1.0 + ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. + .. versionadded:: 18.2.0 *kw_only* + .. versionchanged:: 19.2.0 *convert* keyword argument removed + .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *on_setattr* + """ + eq, order = _determine_eq_order(cmp, eq, order, True) + + if hash is not None and hash is not True and hash is not False: + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + + if factory is not None: + if default is not NOTHING: + raise ValueError( + "The `default` and `factory` arguments are mutually " + "exclusive." + ) + if not callable(factory): + raise ValueError("The `factory` argument must be a callable.") + default = Factory(factory) + + if metadata is None: + metadata = {} + + # Apply syntactic sugar by auto-wrapping. 
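+    # [Editor's note, illustrative only] The wraps below make list arguments
+    # equivalent to their explicitly combined forms, e.g.:
+    #
+    #     attr.ib(validator=[v1, v2])   ==>  attr.ib(validator=and_(v1, v2))
+    #     attr.ib(converter=[c1, c2])   ==>  attr.ib(converter=pipe(c1, c2))
+    #
+    # where v1/v2/c1/c2 are hypothetical callables; pipe() applies c1 first.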
+ if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + if validator and isinstance(validator, (list, tuple)): + validator = and_(*validator) + + if converter and isinstance(converter, (list, tuple)): + converter = pipe(*converter) + + return _CountingAttr( + default=default, + validator=validator, + repr=repr, + cmp=None, + hash=hash, + init=init, + converter=converter, + metadata=metadata, + type=type, + kw_only=kw_only, + eq=eq, + order=order, + on_setattr=on_setattr, + ) + + +def _make_attr_tuple_class(cls_name, attr_names): + """ + Create a tuple subclass to hold `Attribute`s for an `attrs` class. + + The subclass is a bare tuple with properties for names. + + class MyClassAttributes(tuple): + __slots__ = () + x = property(itemgetter(0)) + """ + attr_class_name = "{}Attributes".format(cls_name) + attr_class_template = [ + "class {}(tuple):".format(attr_class_name), + " __slots__ = ()", + ] + if attr_names: + for i, attr_name in enumerate(attr_names): + attr_class_template.append( + _tuple_property_pat.format(index=i, attr_name=attr_name) + ) + else: + attr_class_template.append(" pass") + globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} + eval(compile("\n".join(attr_class_template), "", "exec"), globs) + + return globs[attr_class_name] + + +# Tuple class for extracted attributes from a class definition. +# `base_attrs` is a subset of `attrs`. +_Attributes = _make_attr_tuple_class( + "_Attributes", + [ + # all attributes to build dunder methods for + "attrs", + # attributes that have been inherited + "base_attrs", + # map inherited attributes to their originating classes + "base_attrs_map", + ], +) + + +def _is_class_var(annot): + """ + Check whether *annot* is a typing.ClassVar. + + The string comparison hack is used to avoid evaluating all string + annotations which would put attrs-based classes at a performance + disadvantage compared to plain old classes. + """ + return str(annot).startswith(_classvar_prefixes) + + +def _has_own_attribute(cls, attrib_name): + """ + Check whether *cls* defines *attrib_name* (and doesn't just inherit it). + + Requires Python 3. + """ + attr = getattr(cls, attrib_name, _sentinel) + if attr is _sentinel: + return False + + for base_cls in cls.__mro__[1:]: + a = getattr(base_cls, attrib_name, None) + if attr is a: + return False + + return True + + +def _get_annotations(cls): + """ + Get annotations for *cls*. + """ + if _has_own_attribute(cls, "__annotations__"): + return cls.__annotations__ + + return {} + + +def _counter_getter(e): + """ + Key function for sorting to avoid re-creating a lambda for every class. + """ + return e[1].counter + + +def _collect_base_attrs(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in reversed(cls.__mro__[1:-1]): + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.inherited or a.name in taken_attr_names: + continue + + a = a._assoc(inherited=True) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + # For each name, only keep the freshest definition i.e. the furthest at the + # back. base_attr_map is fine because it gets overwritten with every new + # instance. 
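+    # [Editor's illustration, hypothetical classes] If base A defines
+    # ``x = attr.ib()`` and a more derived base B(A) redefines ``x``, both
+    # copies end up in base_attrs above; the loop below keeps only B's,
+    # i.e. the definition closest to *cls* in the MRO.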
+ filtered = [] + seen = set() + for a in reversed(base_attrs): + if a.name in seen: + continue + filtered.insert(0, a) + seen.add(a.name) + + return filtered, base_attr_map + + +def _collect_base_attrs_broken(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + + N.B. *taken_attr_names* will be mutated. + + Adhere to the old incorrect behavior. + + Notably it collects from the front and considers inherited attributes which + leads to the buggy behavior reported in #428. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in cls.__mro__[1:-1]: + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.name in taken_attr_names: + continue + + a = a._assoc(inherited=True) + taken_attr_names.add(a.name) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + return base_attrs, base_attr_map + + +def _transform_attrs(cls, these, auto_attribs, kw_only, collect_by_mro): + """ + Transform all `_CountingAttr`s on a class into `Attribute`s. + + If *these* is passed, use that and don't look for them on the class. + + *collect_by_mro* is True, collect them in the correct MRO order, otherwise + use the old -- incorrect -- order. See #428. + + Return an `_Attributes`. + """ + cd = cls.__dict__ + anns = _get_annotations(cls) + + if these is not None: + ca_list = [(name, ca) for name, ca in iteritems(these)] + + if not isinstance(these, ordered_dict): + ca_list.sort(key=_counter_getter) + elif auto_attribs is True: + ca_names = { + name + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + } + ca_list = [] + annot_names = set() + for attr_name, type in anns.items(): + if _is_class_var(type): + continue + annot_names.add(attr_name) + a = cd.get(attr_name, NOTHING) + if not isinstance(a, _CountingAttr): + if a is NOTHING: + a = attrib() + else: + a = attrib(default=a) + ca_list.append((attr_name, a)) + + unannotated = ca_names - annot_names + if len(unannotated) > 0: + raise UnannotatedAttributeError( + "The following `attr.ib`s lack a type annotation: " + + ", ".join( + sorted(unannotated, key=lambda n: cd.get(n).counter) + ) + + "." + ) + else: + ca_list = sorted( + ( + (name, attr) + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + ), + key=lambda e: e[1].counter, + ) + + own_attrs = [ + Attribute.from_counting_attr( + name=attr_name, ca=ca, type=anns.get(attr_name) + ) + for attr_name, ca in ca_list + ] + + if collect_by_mro: + base_attrs, base_attr_map = _collect_base_attrs( + cls, {a.name for a in own_attrs} + ) + else: + base_attrs, base_attr_map = _collect_base_attrs_broken( + cls, {a.name for a in own_attrs} + ) + + attr_names = [a.name for a in base_attrs + own_attrs] + + AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) + + if kw_only: + own_attrs = [a._assoc(kw_only=True) for a in own_attrs] + base_attrs = [a._assoc(kw_only=True) for a in base_attrs] + + attrs = AttrsClass(base_attrs + own_attrs) + + # Mandatory vs non-mandatory attr order only matters when they are part of + # the __init__ signature and when they aren't kw_only (which are moved to + # the end and can be mandatory or non-mandatory in any order, as they will + # be specified as keyword args anyway). 
Check the order of those attrs: + had_default = False + for a in (a for a in attrs if a.init is not False and a.kw_only is False): + if had_default is True and a.default is NOTHING: + raise ValueError( + "No mandatory attributes allowed after an attribute with a " + "default value or factory. Attribute in question: %r" % (a,) + ) + + if had_default is False and a.default is not NOTHING: + had_default = True + + return _Attributes((attrs, base_attrs, base_attr_map)) + + +def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. + """ + raise FrozenInstanceError() + + +def _frozen_delattrs(self, name): + """ + Attached to frozen classes as __delattr__. + """ + raise FrozenInstanceError() + + +class _ClassBuilder(object): + """ + Iteratively build *one* class. + """ + + __slots__ = ( + "_attr_names", + "_attrs", + "_base_attr_map", + "_base_names", + "_cache_hash", + "_cls", + "_cls_dict", + "_delete_attribs", + "_frozen", + "_has_post_init", + "_is_exc", + "_on_setattr", + "_slots", + "_weakref_slot", + "_has_own_setattr", + "_has_custom_setattr", + ) + + def __init__( + self, + cls, + these, + slots, + frozen, + weakref_slot, + getstate_setstate, + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_custom_setattr, + ): + attrs, base_attrs, base_map = _transform_attrs( + cls, these, auto_attribs, kw_only, collect_by_mro + ) + + self._cls = cls + self._cls_dict = dict(cls.__dict__) if slots else {} + self._attrs = attrs + self._base_names = set(a.name for a in base_attrs) + self._base_attr_map = base_map + self._attr_names = tuple(a.name for a in attrs) + self._slots = slots + self._frozen = frozen + self._weakref_slot = weakref_slot + self._cache_hash = cache_hash + self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) + self._delete_attribs = not bool(these) + self._is_exc = is_exc + self._on_setattr = on_setattr + + self._has_custom_setattr = has_custom_setattr + self._has_own_setattr = False + + self._cls_dict["__attrs_attrs__"] = self._attrs + + if frozen: + self._cls_dict["__setattr__"] = _frozen_setattrs + self._cls_dict["__delattr__"] = _frozen_delattrs + + self._has_own_setattr = True + + if getstate_setstate: + ( + self._cls_dict["__getstate__"], + self._cls_dict["__setstate__"], + ) = self._make_getstate_setstate() + + def __repr__(self): + return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__) + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. + """ + if self._slots is True: + return self._create_slots_class() + else: + return self._patch_original_class() + + def _patch_original_class(self): + """ + Apply accumulated methods and return the class. + """ + cls = self._cls + base_names = self._base_names + + # Clean class of attribute definitions (`attr.ib()`s). + if self._delete_attribs: + for name in self._attr_names: + if ( + name not in base_names + and getattr(cls, name, _sentinel) is not _sentinel + ): + try: + delattr(cls, name) + except AttributeError: + # This can happen if a base class defines a class + # variable and we want to set an attribute with the + # same name by using only a type annotation. + pass + + # Attach our dunder methods. + for name, value in self._cls_dict.items(): + setattr(cls, name, value) + + # If we've inherited an attrs __setattr__ and don't write our own, + # reset it to object's. 
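+        # [Editor's note, illustrative] E.g. a base class built with
+        # @attr.s(on_setattr=...) installed a hook-running __setattr__; a
+        # plain attrs subclass that declares no hooks of its own should not
+        # keep firing the parent's hooks, hence the reset below.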
+ if not self._has_own_setattr and getattr( + cls, "__attrs_own_setattr__", False + ): + cls.__attrs_own_setattr__ = False + + if not self._has_custom_setattr: + cls.__setattr__ = object.__setattr__ + + return cls + + def _create_slots_class(self): + """ + Build and return a new class with a `__slots__` attribute. + """ + base_names = self._base_names + cd = { + k: v + for k, v in iteritems(self._cls_dict) + if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") + } + + # If our class doesn't have its own implementation of __setattr__ + # (either from the user or by us), check the bases, if one of them has + # an attrs-made __setattr__, that needs to be reset. We don't walk the + # MRO because we only care about our immediate base classes. + # XXX: This can be confused by subclassing a slotted attrs class with + # XXX: a non-attrs class and subclass the resulting class with an attrs + # XXX: class. See `test_slotted_confused` for details. For now that's + # XXX: OK with us. + if not self._has_own_setattr: + cd["__attrs_own_setattr__"] = False + + if not self._has_custom_setattr: + for base_cls in self._cls.__bases__: + if base_cls.__dict__.get("__attrs_own_setattr__", False): + cd["__setattr__"] = object.__setattr__ + break + + # Traverse the MRO to check for an existing __weakref__. + weakref_inherited = False + for base_cls in self._cls.__mro__[1:-1]: + if base_cls.__dict__.get("__weakref__", None) is not None: + weakref_inherited = True + break + + names = self._attr_names + if ( + self._weakref_slot + and "__weakref__" not in getattr(self._cls, "__slots__", ()) + and "__weakref__" not in names + and not weakref_inherited + ): + names += ("__weakref__",) + + # We only add the names of attributes that aren't inherited. + # Setting __slots__ to inherited attributes wastes memory. + slot_names = [name for name in names if name not in base_names] + if self._cache_hash: + slot_names.append(_hash_cache_field) + cd["__slots__"] = tuple(slot_names) + + qualname = getattr(self._cls, "__qualname__", None) + if qualname is not None: + cd["__qualname__"] = qualname + + # Create new class based on old class and our methods. + cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) + + # The following is a fix for + # https://github.com/python-attrs/attrs/issues/102. On Python 3, + # if a method mentions `__class__` or uses the no-arg super(), the + # compiler will bake a reference to the class in the method itself + # as `method.__closure__`. Since we replace the class with a + # clone, we rewrite these references so it keeps working. + for item in cls.__dict__.values(): + if isinstance(item, (classmethod, staticmethod)): + # Class- and staticmethods hide their functions inside. + # These might need to be rewritten as well. + closure_cells = getattr(item.__func__, "__closure__", None) + else: + closure_cells = getattr(item, "__closure__", None) + + if not closure_cells: # Catch None or the empty list. + continue + for cell in closure_cells: + try: + match = cell.cell_contents is self._cls + except ValueError: # ValueError: Cell is empty + pass + else: + if match: + set_closure_cell(cell, cls) + + return cls + + def add_repr(self, ns): + self._cls_dict["__repr__"] = self._add_method_dunders( + _make_repr(self._attrs, ns=ns) + ) + return self + + def add_str(self): + repr = self._cls_dict.get("__repr__") + if repr is None: + raise ValueError( + "__str__ can only be generated if a __repr__ exists." 
+ ) + + def __str__(self): + return self.__repr__() + + self._cls_dict["__str__"] = self._add_method_dunders(__str__) + return self + + def _make_getstate_setstate(self): + """ + Create custom __setstate__ and __getstate__ methods. + """ + # __weakref__ is not writable. + state_attr_names = tuple( + an for an in self._attr_names if an != "__weakref__" + ) + + def slots_getstate(self): + """ + Automatically created by attrs. + """ + return tuple(getattr(self, name) for name in state_attr_names) + + hash_caching_enabled = self._cache_hash + + def slots_setstate(self, state): + """ + Automatically created by attrs. + """ + __bound_setattr = _obj_setattr.__get__(self, Attribute) + for name, value in zip(state_attr_names, state): + __bound_setattr(name, value) + + # The hash code cache is not included when the object is + # serialized, but it still needs to be initialized to None to + # indicate that the first call to __hash__ should be a cache + # miss. + if hash_caching_enabled: + __bound_setattr(_hash_cache_field, None) + + return slots_getstate, slots_setstate + + def make_unhashable(self): + self._cls_dict["__hash__"] = None + return self + + def add_hash(self): + self._cls_dict["__hash__"] = self._add_method_dunders( + _make_hash( + self._cls, + self._attrs, + frozen=self._frozen, + cache_hash=self._cache_hash, + ) + ) + + return self + + def add_init(self): + self._cls_dict["__init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr is not None + and self._on_setattr is not setters.NO_OP, + ) + ) + + return self + + def add_eq(self): + cd = self._cls_dict + + cd["__eq__"] = self._add_method_dunders( + _make_eq(self._cls, self._attrs) + ) + cd["__ne__"] = self._add_method_dunders(_make_ne()) + + return self + + def add_order(self): + cd = self._cls_dict + + cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( + self._add_method_dunders(meth) + for meth in _make_order(self._cls, self._attrs) + ) + + return self + + def add_setattr(self): + if self._frozen: + return self + + sa_attrs = {} + for a in self._attrs: + on_setattr = a.on_setattr or self._on_setattr + if on_setattr and on_setattr is not setters.NO_OP: + sa_attrs[a.name] = a, on_setattr + + if not sa_attrs: + return self + + if self._has_custom_setattr: + # We need to write a __setattr__ but there already is one! + raise ValueError( + "Can't combine custom __setattr__ with on_setattr hooks." + ) + + # docstring comes from _add_method_dunders + def __setattr__(self, name, val): + try: + a, hook = sa_attrs[name] + except KeyError: + nval = val + else: + nval = hook(self, a, val) + + _obj_setattr(self, name, nval) + + self._cls_dict["__attrs_own_setattr__"] = True + self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) + self._has_own_setattr = True + + return self + + def _add_method_dunders(self, method): + """ + Add __module__ and __qualname__ to a *method* if possible. + """ + try: + method.__module__ = self._cls.__module__ + except AttributeError: + pass + + try: + method.__qualname__ = ".".join( + (self._cls.__qualname__, method.__name__) + ) + except AttributeError: + pass + + try: + method.__doc__ = "Method generated by attrs for class %s." % ( + self._cls.__qualname__, + ) + except AttributeError: + pass + + return method + + +_CMP_DEPRECATION = ( + "The usage of `cmp` is deprecated and will be removed on or after " + "2021-06-01. 
Please use `eq` and `order` instead." +) + + +def _determine_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=3) + + return cmp, cmp + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq = default_eq + + if order is None: + order = eq + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, order + + +def _determine_whether_to_implement( + cls, flag, auto_detect, dunders, default=True +): + """ + Check whether we should implement a set of methods for *cls*. + + *flag* is the argument passed into @attr.s like 'init', *auto_detect* the + same as passed into @attr.s and *dunders* is a tuple of attribute names + whose presence signal that the user has implemented it themselves. + + Return *default* if no reason for either for or against is found. + + auto_detect must be False on Python 2. + """ + if flag is True or flag is False: + return flag + + if flag is None and auto_detect is False: + return default + + # Logically, flag is None and auto_detect is True here. + for dunder in dunders: + if _has_own_attribute(cls, dunder): + return False + + return default + + +def attrs( + maybe_cls=None, + these=None, + repr_ns=None, + repr=None, + cmp=None, + hash=None, + init=None, + slots=False, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=False, + kw_only=False, + cache_hash=False, + auto_exc=False, + eq=None, + order=None, + auto_detect=False, + collect_by_mro=False, + getstate_setstate=None, + on_setattr=None, +): + r""" + A class decorator that adds `dunder + `_\ -methods according to the + specified attributes using `attr.ib` or the *these* argument. + + :param these: A dictionary of name to `attr.ib` mappings. This is + useful to avoid the definition of your attributes within the class body + because you can't (e.g. if you want to add ``__repr__`` methods to + Django models) or don't want to. + + If *these* is not ``None``, ``attrs`` will *not* search the class body + for attributes and will *not* remove any attributes from it. + + If *these* is an ordered dict (`dict` on Python 3.6+, + `collections.OrderedDict` otherwise), the order is deduced from + the order of the attributes inside *these*. Otherwise the order + of the definition of the attributes is used. + + :type these: `dict` of `str` to `attr.ib` + + :param str repr_ns: When using nested classes, there's no way in Python 2 + to automatically detect that. Therefore it's possible to set the + namespace explicitly for a more meaningful ``repr`` output. + :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*, + *order*, and *hash* arguments explicitly, assume they are set to + ``True`` **unless any** of the involved methods for one of the + arguments is implemented in the *current* class (i.e. it is *not* + inherited from some base class). 
+ + So for example by implementing ``__eq__`` on a class yourself, + ``attrs`` will deduce ``eq=False`` and won't create *neither* + ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible + ``__ne__`` by default, so it *should* be enough to only implement + ``__eq__`` in most cases). + + .. warning:: + + If you prevent ``attrs`` from creating the ordering methods for you + (``order=False``, e.g. by implementing ``__le__``), it becomes + *your* responsibility to make sure its ordering is sound. The best + way is to use the `functools.total_ordering` decorator. + + + Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*, + *cmp*, or *hash* overrides whatever *auto_detect* would determine. + + *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises + a `PythonTooOldError`. + + :param bool repr: Create a ``__repr__`` method with a human readable + representation of ``attrs`` attributes.. + :param bool str: Create a ``__str__`` method that is identical to + ``__repr__``. This is usually not necessary except for + `Exception`\ s. + :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__`` + and ``__ne__`` methods that check two instances for equality. + + They compare the instances as if they were tuples of their ``attrs`` + attributes if and only if the types of both classes are *identical*! + :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``, + ``__gt__``, and ``__ge__`` methods that behave like *eq* above and + allow instances to be ordered. If ``None`` (default) mirror value of + *eq*. + :param Optional[bool] cmp: Setting to ``True`` is equivalent to setting + ``eq=True, order=True``. Deprecated in favor of *eq* and *order*, has + precedence over them for backward-compatibility though. Must not be + mixed with *eq* or *order*. + :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method + is generated according how *eq* and *frozen* are set. + + 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you. + 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to + None, marking it unhashable (which it is). + 3. If *eq* is False, ``__hash__`` will be left untouched meaning the + ``__hash__`` method of the base class will be used (if base class is + ``object``, this means it will fall back to id-based hashing.). + + Although not recommended, you can decide for yourself and force + ``attrs`` to create one (e.g. if the class is immutable even though you + didn't freeze it programmatically) by passing ``True`` or not. Both of + these cases are rather special and should be used carefully. + + See our documentation on `hashing`, Python's documentation on + `object.__hash__`, and the `GitHub issue that led to the default \ + behavior `_ for more + details. + :param bool init: Create a ``__init__`` method that initializes the + ``attrs`` attributes. Leading underscores are stripped for the + argument name. If a ``__attrs_post_init__`` method exists on the + class, it will be called after the class is fully initialized. + :param bool slots: Create a `slotted class ` that's more + memory-efficient. + :param bool frozen: Make instances immutable after initialization. If + someone attempts to modify a frozen instance, + `attr.exceptions.FrozenInstanceError` is raised. + + Please note: + + 1. This is achieved by installing a custom ``__setattr__`` method + on your class, so you can't implement your own. + + 2. True immutability is impossible in Python. + + 3. 
This *does* have a minor a runtime performance `impact + ` when initializing new instances. In other words: + ``__init__`` is slightly slower with ``frozen=True``. + + 4. If a class is frozen, you cannot modify ``self`` in + ``__attrs_post_init__`` or a self-written ``__init__``. You can + circumvent that limitation by using + ``object.__setattr__(self, "attribute_name", value)``. + + 5. Subclasses of a frozen class are frozen too. + + :param bool weakref_slot: Make instances weak-referenceable. This has no + effect unless ``slots`` is also enabled. + :param bool auto_attribs: If ``True``, collect `PEP 526`_-annotated + attributes (Python 3.6 and later only) from the class body. + + In this case, you **must** annotate every field. If ``attrs`` + encounters a field that is set to an `attr.ib` but lacks a type + annotation, an `attr.exceptions.UnannotatedAttributeError` is + raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't + want to set a type. + + If you assign a value to those attributes (e.g. ``x: int = 42``), that + value becomes the default value like if it were passed using + ``attr.ib(default=42)``. Passing an instance of `Factory` also + works as expected. + + Attributes annotated as `typing.ClassVar`, and attributes that are + neither annotated nor set to an `attr.ib` are **ignored**. + + .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/ + :param bool kw_only: Make all attributes keyword-only (Python 3+) + in the generated ``__init__`` (if ``init`` is ``False``, this + parameter is ignored). + :param bool cache_hash: Ensure that the object's hash code is computed + only once and stored on the object. If this is set to ``True``, + hashing must be either explicitly or implicitly enabled for this + class. If the hash code is cached, avoid any reassignments of + fields involved in hash code computation or mutations of the objects + those fields point to after object creation. If such changes occur, + the behavior of the object's hash code is undefined. + :param bool auto_exc: If the class subclasses `BaseException` + (which implicitly includes any subclass of any exception), the + following happens to behave like a well-behaved Python exceptions + class: + + - the values for *eq*, *order*, and *hash* are ignored and the + instances compare and hash by the instance's ids (N.B. ``attrs`` will + *not* remove existing implementations of ``__hash__`` or the equality + methods. It just won't add own ones.), + - all attributes that are either passed into ``__init__`` or have a + default value are additionally available as a tuple in the ``args`` + attribute, + - the value of *str* is ignored leaving ``__str__`` to base classes. + :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs`` + collects attributes from base classes. The default behavior is + incorrect in certain cases of multiple inheritance. It should be on by + default but is kept off for backward-compatability. + + See issue `#428 `_ for + more details. + + :param Optional[bool] getstate_setstate: + .. note:: + This is usually only interesting for slotted classes and you should + probably just set *auto_detect* to `True`. + + If `True`, ``__getstate__`` and + ``__setstate__`` are generated and attached to the class. This is + necessary for slotted classes to be pickleable. If left `None`, it's + `True` by default for slotted classes and ``False`` for dict classes. 
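+
+        Roughly, the generated pair looks like this (editor's sketch of
+        ``_make_getstate_setstate`` above)::
+
+            def __getstate__(self):
+                return tuple(getattr(self, name) for name in state_attr_names)
+
+            def __setstate__(self, state):
+                for name, value in zip(state_attr_names, state):
+                    object.__setattr__(self, name, value)
+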
+ + If *auto_detect* is `True`, and *getstate_setstate* is left `None`, + and **either** ``__getstate__`` or ``__setstate__`` is detected directly + on the class (i.e. not inherited), it is set to `False` (this is usually + what you want). + + :param on_setattr: A callable that is run whenever the user attempts to set + an attribute (either by assignment like ``i.x = 42`` or by using + `setattr` like ``setattr(i, "x", 42)``). It receives the same argument + as validators: the instance, the attribute that is being modified, and + the new value. + + If no exception is raised, the attribute is set to the return value of + the callable. + + If a list of callables is passed, they're automatically wrapped in an + `attr.setters.pipe`. + + + .. versionadded:: 16.0.0 *slots* + .. versionadded:: 16.1.0 *frozen* + .. versionadded:: 16.3.0 *str* + .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. + .. versionchanged:: 17.1.0 + *hash* supports ``None`` as value which is also the default now. + .. versionadded:: 17.3.0 *auto_attribs* + .. versionchanged:: 18.1.0 + If *these* is passed, no attributes are deleted from the class body. + .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. + .. versionadded:: 18.2.0 *weakref_slot* + .. deprecated:: 18.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a + `DeprecationWarning` if the classes compared are subclasses of + each other. ``__eq`` and ``__ne__`` never tried to compared subclasses + to each other. + .. versionchanged:: 19.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider + subclasses comparable anymore. + .. versionadded:: 18.2.0 *kw_only* + .. versionadded:: 18.2.0 *cache_hash* + .. versionadded:: 19.1.0 *auto_exc* + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *auto_detect* + .. versionadded:: 20.1.0 *collect_by_mro* + .. versionadded:: 20.1.0 *getstate_setstate* + .. versionadded:: 20.1.0 *on_setattr* + """ + if auto_detect and PY2: + raise PythonTooOldError( + "auto_detect only works on Python 3 and later." 
+ ) + + eq_, order_ = _determine_eq_order(cmp, eq, order, None) + hash_ = hash # work around the lack of nonlocal + + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + def wrap(cls): + + if getattr(cls, "__class__", None) is None: + raise TypeError("attrs only works with new-style classes.") + + is_frozen = frozen or _has_frozen_base_class(cls) + is_exc = auto_exc is True and issubclass(cls, BaseException) + has_own_setattr = auto_detect and _has_own_attribute( + cls, "__setattr__" + ) + + if has_own_setattr and is_frozen: + raise ValueError("Can't freeze a class with a custom __setattr__.") + + builder = _ClassBuilder( + cls, + these, + slots, + is_frozen, + weakref_slot, + _determine_whether_to_implement( + cls, + getstate_setstate, + auto_detect, + ("__getstate__", "__setstate__"), + default=slots, + ), + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_own_setattr, + ) + if _determine_whether_to_implement( + cls, repr, auto_detect, ("__repr__",) + ): + builder.add_repr(repr_ns) + if str is True: + builder.add_str() + + eq = _determine_whether_to_implement( + cls, eq_, auto_detect, ("__eq__", "__ne__") + ) + if not is_exc and eq is True: + builder.add_eq() + if not is_exc and _determine_whether_to_implement( + cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") + ): + builder.add_order() + + builder.add_setattr() + + if ( + hash_ is None + and auto_detect is True + and _has_own_attribute(cls, "__hash__") + ): + hash = False + else: + hash = hash_ + if hash is not True and hash is not False and hash is not None: + # Can't use `hash in` because 1 == True for example. + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + elif hash is False or (hash is None and eq is False) or is_exc: + # Don't do anything. Should fall back to __object__'s __hash__ + # which is by id. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) + elif hash is True or ( + hash is None and eq is True and is_frozen is True + ): + # Build a __hash__ if told so, or if it's safe. + builder.add_hash() + else: + # Raise TypeError on attempts to hash. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) + builder.make_unhashable() + + if _determine_whether_to_implement( + cls, init, auto_detect, ("__init__",) + ): + builder.add_init() + else: + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " init must be True." + ) + + return builder.build_class() + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. + if maybe_cls is None: + return wrap + else: + return wrap(maybe_cls) + + +_attrs = attrs +""" +Internal alias so we can use it in functions that take an argument called +*attrs*. +""" + + +if PY2: + + def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. + """ + return ( + getattr(cls.__setattr__, "__module__", None) + == _frozen_setattrs.__module__ + and cls.__setattr__.__name__ == _frozen_setattrs.__name__ + ) + + +else: + + def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. 
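+
+        Illustrative doctest (editor's addition)::
+
+            >>> import attr
+            >>> @attr.s(frozen=True)
+            ... class F(object):
+            ...     x = attr.ib()
+            >>> class G(F):
+            ...     pass
+            >>> _has_frozen_base_class(G)
+            True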
+ """ + return cls.__setattr__ == _frozen_setattrs + + +def _attrs_to_tuple(obj, attrs): + """ + Create a tuple of all values of *obj*'s *attrs*. + """ + return tuple(getattr(obj, a.name) for a in attrs) + + +def _generate_unique_filename(cls, func_name): + """ + Create a "filename" suitable for a function being generated. + """ + unique_id = uuid.uuid4() + extra = "" + count = 1 + + while True: + unique_filename = "".format( + func_name, + cls.__module__, + getattr(cls, "__qualname__", cls.__name__), + extra, + ) + # To handle concurrency we essentially "reserve" our spot in + # the linecache with a dummy line. The caller can then + # set this value correctly. + cache_line = (1, None, (str(unique_id),), unique_filename) + if ( + linecache.cache.setdefault(unique_filename, cache_line) + == cache_line + ): + return unique_filename + + # Looks like this spot is taken. Try again. + count += 1 + extra = "-{0}".format(count) + + +def _make_hash(cls, attrs, frozen, cache_hash): + attrs = tuple( + a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) + ) + + tab = " " + + unique_filename = _generate_unique_filename(cls, "hash") + type_hash = hash(unique_filename) + + hash_def = "def __hash__(self" + hash_func = "hash((" + closing_braces = "))" + if not cache_hash: + hash_def += "):" + else: + if not PY2: + hash_def += ", *" + + hash_def += ( + ", _cache_wrapper=" + + "__import__('attr._make')._make._CacheHashWrapper):" + ) + hash_func = "_cache_wrapper(" + hash_func + closing_braces += ")" + + method_lines = [hash_def] + + def append_hash_computation_lines(prefix, indent): + """ + Generate the code for actually computing the hash code. + Below this will either be returned directly or used to compute + a value which is then cached, depending on the value of cache_hash + """ + + method_lines.extend( + [ + indent + prefix + hash_func, + indent + " %d," % (type_hash,), + ] + ) + + for a in attrs: + method_lines.append(indent + " self.%s," % a.name) + + method_lines.append(indent + " " + closing_braces) + + if cache_hash: + method_lines.append(tab + "if self.%s is None:" % _hash_cache_field) + if frozen: + append_hash_computation_lines( + "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2 + ) + method_lines.append(tab * 2 + ")") # close __setattr__ + else: + append_hash_computation_lines( + "self.%s = " % _hash_cache_field, tab * 2 + ) + method_lines.append(tab + "return self.%s" % _hash_cache_field) + else: + append_hash_computation_lines("return ", tab) + + script = "\n".join(method_lines) + globs = {} + locs = {} + bytecode = compile(script, unique_filename, "exec") + eval(bytecode, globs, locs) + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + linecache.cache[unique_filename] = ( + len(script), + None, + script.splitlines(True), + unique_filename, + ) + + return locs["__hash__"] + + +def _add_hash(cls, attrs): + """ + Add a hash method to *cls*. + """ + cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False) + return cls + + +def _make_ne(): + """ + Create __ne__ method. + """ + + def __ne__(self, other): + """ + Check equality and either forward a NotImplemented or + return the result negated. + """ + result = self.__eq__(other) + if result is NotImplemented: + return NotImplemented + + return not result + + return __ne__ + + +def _make_eq(cls, attrs): + """ + Create __eq__ method for *cls* with *attrs*. 
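+
+    Illustrative doctest (editor's addition): the generated ``__eq__``
+    compares field tuples but only for *identical* classes::
+
+        >>> import attr
+        >>> @attr.s
+        ... class A(object):
+        ...     x = attr.ib()
+        >>> @attr.s
+        ... class B(object):
+        ...     x = attr.ib()
+        >>> A(1) == A(1)
+        True
+        >>> A(1) == B(1)
+        False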
+ """ + attrs = [a for a in attrs if a.eq] + + unique_filename = _generate_unique_filename(cls, "eq") + lines = [ + "def __eq__(self, other):", + " if other.__class__ is not self.__class__:", + " return NotImplemented", + ] + # We can't just do a big self.x = other.x and... clause due to + # irregularities like nan == nan is false but (nan,) == (nan,) is true. + if attrs: + lines.append(" return (") + others = [" ) == ("] + for a in attrs: + lines.append(" self.%s," % (a.name,)) + others.append(" other.%s," % (a.name,)) + + lines += others + [" )"] + else: + lines.append(" return True") + + script = "\n".join(lines) + globs = {} + locs = {} + bytecode = compile(script, unique_filename, "exec") + eval(bytecode, globs, locs) + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + linecache.cache[unique_filename] = ( + len(script), + None, + script.splitlines(True), + unique_filename, + ) + return locs["__eq__"] + + +def _make_order(cls, attrs): + """ + Create ordering methods for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.order] + + def attrs_to_tuple(obj): + """ + Save us some typing. + """ + return _attrs_to_tuple(obj, attrs) + + def __lt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) < attrs_to_tuple(other) + + return NotImplemented + + def __le__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) <= attrs_to_tuple(other) + + return NotImplemented + + def __gt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) > attrs_to_tuple(other) + + return NotImplemented + + def __ge__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) >= attrs_to_tuple(other) + + return NotImplemented + + return __lt__, __le__, __gt__, __ge__ + + +def _add_eq(cls, attrs=None): + """ + Add equality methods to *cls* with *attrs*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__eq__ = _make_eq(cls, attrs) + cls.__ne__ = _make_ne() + + return cls + + +_already_repring = threading.local() + + +def _make_repr(attrs, ns): + """ + Make a repr method that includes relevant *attrs*, adding *ns* to the full + name. + """ + + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom callable. + attr_names_with_reprs = tuple( + (a.name, repr if a.repr is True else a.repr) + for a in attrs + if a.repr is not False + ) + + def __repr__(self): + """ + Automatically created by attrs. + """ + try: + working_set = _already_repring.working_set + except AttributeError: + working_set = set() + _already_repring.working_set = working_set + + if id(self) in working_set: + return "..." + real_cls = self.__class__ + if ns is None: + qualname = getattr(real_cls, "__qualname__", None) + if qualname is not None: + class_name = qualname.rsplit(">.", 1)[-1] + else: + class_name = real_cls.__name__ + else: + class_name = ns + "." + real_cls.__name__ + + # Since 'self' remains on the stack (i.e.: strongly referenced) for the + # duration of this call, it's safe to depend on id(...) stability, and + # not need to track the instance and therefore worry about properties + # like weakref- or hash-ability. 
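+        # [Editor's note, illustrative] This guard is what renders reference
+        # cycles as "..." instead of recursing forever, e.g. for a
+        # hypothetical node n with n.other = n:  repr(n) == 'Node(other=...)'.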
+ working_set.add(id(self)) + try: + result = [class_name, "("] + first = True + for name, attr_repr in attr_names_with_reprs: + if first: + first = False + else: + result.append(", ") + result.extend( + (name, "=", attr_repr(getattr(self, name, NOTHING))) + ) + return "".join(result) + ")" + finally: + working_set.remove(id(self)) + + return __repr__ + + +def _add_repr(cls, ns=None, attrs=None): + """ + Add a repr method to *cls*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__repr__ = _make_repr(attrs, ns) + return cls + + +def fields(cls): + """ + Return the tuple of ``attrs`` attributes for a class. + + The tuple also allows accessing the fields by their names (see below for + examples). + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: tuple (with name accessors) of `attr.Attribute` + + .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields + by name. + """ + if not isclass(cls): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError( + "{cls!r} is not an attrs-decorated class.".format(cls=cls) + ) + return attrs + + +def fields_dict(cls): + """ + Return an ordered dictionary of ``attrs`` attributes for a class, whose + keys are the attribute names. + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: an ordered dict where keys are attribute names and values are + `attr.Attribute`\\ s. This will be a `dict` if it's + naturally ordered like on Python 3.6+ or an + :class:`~collections.OrderedDict` otherwise. + + .. versionadded:: 18.1.0 + """ + if not isclass(cls): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError( + "{cls!r} is not an attrs-decorated class.".format(cls=cls) + ) + return ordered_dict(((a.name, a) for a in attrs)) + + +def validate(inst): + """ + Validate all attributes on *inst* that have a validator. + + Leaves all exceptions through. + + :param inst: Instance of a class with ``attrs`` attributes. + """ + if _config._run_validators is False: + return + + for a in fields(inst.__class__): + v = a.validator + if v is not None: + v(inst, a, getattr(inst, a.name)) + + +def _is_slot_cls(cls): + return "__slots__" in cls.__dict__ + + +def _is_slot_attr(a_name, base_attr_map): + """ + Check if the attribute name comes from a slot class. 
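+
+    Illustrative doctest (editor's addition)::
+
+        >>> class Slotted(object):
+        ...     __slots__ = ("x",)
+        >>> class Plain(object):
+        ...     pass
+        >>> _is_slot_attr("x", {"x": Slotted})
+        True
+        >>> _is_slot_attr("x", {"x": Plain})
+        False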
+ """ + return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) + + +def _make_init( + cls, + attrs, + post_init, + frozen, + slots, + cache_hash, + base_attr_map, + is_exc, + has_global_on_setattr, +): + if frozen and has_global_on_setattr: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = cache_hash or frozen + filtered_attrs = [] + attr_dict = {} + for a in attrs: + if not a.init and a.default is NOTHING: + continue + + filtered_attrs.append(a) + attr_dict[a.name] = a + + if a.on_setattr is not None: + if frozen is True: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = True + elif ( + has_global_on_setattr and a.on_setattr is not setters.NO_OP + ) or _is_slot_attr(a.name, base_attr_map): + needs_cached_setattr = True + + unique_filename = _generate_unique_filename(cls, "init") + + script, globs, annotations = _attrs_to_init_script( + filtered_attrs, + frozen, + slots, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_global_on_setattr, + ) + locs = {} + bytecode = compile(script, unique_filename, "exec") + globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) + + if needs_cached_setattr: + # Save the lookup overhead in __init__ if we need to circumvent + # setattr hooks. + globs["_cached_setattr"] = _obj_setattr + + eval(bytecode, globs, locs) + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + linecache.cache[unique_filename] = ( + len(script), + None, + script.splitlines(True), + unique_filename, + ) + + __init__ = locs["__init__"] + __init__.__annotations__ = annotations + + return __init__ + + +def _setattr(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*. + """ + return "_setattr('%s', %s)" % (attr_name, value_var) + + +def _setattr_with_converter(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*, but run + its converter first. + """ + return "_setattr('%s', %s(%s))" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +def _assign(attr_name, value, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise + relegate to _setattr. + """ + if has_on_setattr: + return _setattr(attr_name, value, True) + + return "self.%s = %s" % (attr_name, value) + + +def _assign_with_converter(attr_name, value_var, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment after + conversion. Otherwise relegate to _setattr_with_converter. + """ + if has_on_setattr: + return _setattr_with_converter(attr_name, value_var, True) + + return "self.%s = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +def _attrs_to_init_script( + attrs, + frozen, + slots, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_global_on_setattr, +): + """ + Return a script of an initializer for *attrs* and a dict of globals. + + The globals are expected by the generated script. + + If *frozen* is True, we cannot set the attributes directly so we use + a cached ``object.__setattr__``. + """ + lines = [] + if needs_cached_setattr: + lines.append( + # Circumvent the __setattr__ descriptor to save one lookup per + # assignment. 
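+            # [Editor's note] ``__get__`` pre-binds object.__setattr__ to
+            # *self* once, so every later _setattr(...) call skips the
+            # per-assignment descriptor lookup that plain setattr would repeat.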
+ # Note _setattr will be used again below if cache_hash is True + "_setattr = _cached_setattr.__get__(self, self.__class__)" + ) + + if frozen is True: + if slots is True: + fmt_setter = _setattr + fmt_setter_with_converter = _setattr_with_converter + else: + # Dict frozen classes assign directly to __dict__. + # But only if the attribute doesn't come from an ancestor slot + # class. + # Note _inst_dict will be used again below if cache_hash is True + lines.append("_inst_dict = self.__dict__") + + def fmt_setter(attr_name, value_var, has_on_setattr): + if _is_slot_attr(attr_name, base_attr_map): + return _setattr(attr_name, value_var, has_on_setattr) + + return "_inst_dict['%s'] = %s" % (attr_name, value_var) + + def fmt_setter_with_converter( + attr_name, value_var, has_on_setattr + ): + if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): + return _setattr_with_converter( + attr_name, value_var, has_on_setattr + ) + + return "_inst_dict['%s'] = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + else: + # Not frozen. + fmt_setter = _assign + fmt_setter_with_converter = _assign_with_converter + + args = [] + kw_only_args = [] + attrs_to_validate = [] + + # This is a dictionary of names to validator and converter callables. + # Injecting this into __init__ globals lets us avoid lookups. + names_for_globals = {} + annotations = {"return": None} + + for a in attrs: + if a.validator: + attrs_to_validate.append(a) + + attr_name = a.name + has_on_setattr = a.on_setattr is not None or ( + a.on_setattr is not setters.NO_OP and has_global_on_setattr + ) + arg_name = a.name.lstrip("_") + + has_factory = isinstance(a.default, Factory) + if has_factory and a.default.takes_self: + maybe_self = "self" + else: + maybe_self = "" + + if a.init is False: + if has_factory: + init_factory_name = _init_factory_pat.format(a.name) + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + init_factory_name + "(%s)" % (maybe_self,), + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + init_factory_name + "(%s)" % (maybe_self,), + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + "attr_dict['%s'].default" % (attr_name,), + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + "attr_dict['%s'].default" % (attr_name,), + has_on_setattr, + ) + ) + elif a.default is not NOTHING and not has_factory: + arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name) + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + elif has_factory: + arg = "%s=NOTHING" % (arg_name,) + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + lines.append("if %s is not NOTHING:" % (arg_name,)) + + init_factory_name = _init_factory_pat.format(a.name) + if a.converter is not None: + lines.append( + " " + + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + lines.append("else:") + lines.append( + 
" " + + fmt_setter_with_converter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append( + " " + fmt_setter(attr_name, arg_name, has_on_setattr) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.kw_only: + kw_only_args.append(arg_name) + else: + args.append(arg_name) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + if a.init is True and a.converter is None and a.type is not None: + annotations[arg_name] = a.type + + if attrs_to_validate: # we can skip this if there are no validators. + names_for_globals["_config"] = _config + lines.append("if _config._run_validators is True:") + for a in attrs_to_validate: + val_name = "__attr_validator_" + a.name + attr_name = "__attr_" + a.name + lines.append( + " %s(self, %s, self.%s)" % (val_name, attr_name, a.name) + ) + names_for_globals[val_name] = a.validator + names_for_globals[attr_name] = a + + if post_init: + lines.append("self.__attrs_post_init__()") + + # because this is set only after __attrs_post_init is called, a crash + # will result if post-init tries to access the hash code. This seemed + # preferable to setting this beforehand, in which case alteration to + # field values during post-init combined with post-init accessing the + # hash code would result in silent bugs. + if cache_hash: + if frozen: + if slots: + # if frozen and slots, then _setattr defined above + init_hash_cache = "_setattr('%s', %s)" + else: + # if frozen and not slots, then _inst_dict defined above + init_hash_cache = "_inst_dict['%s'] = %s" + else: + init_hash_cache = "self.%s = %s" + lines.append(init_hash_cache % (_hash_cache_field, "None")) + + # For exceptions we rely on BaseException.__init__ for proper + # initialization. + if is_exc: + vals = ",".join("self." + a.name for a in attrs if a.init) + + lines.append("BaseException.__init__(self, %s)" % (vals,)) + + args = ", ".join(args) + if kw_only_args: + if PY2: + raise PythonTooOldError( + "Keyword-only arguments only work on Python 3 and later." + ) + + args += "{leading_comma}*, {kw_only_args}".format( + leading_comma=", " if args else "", + kw_only_args=", ".join(kw_only_args), + ) + return ( + """\ +def __init__(self, {args}): + {lines} +""".format( + args=args, lines="\n ".join(lines) if lines else "pass" + ), + names_for_globals, + annotations, + ) + + +class Attribute(object): + """ + *Read-only* representation of an attribute. + + :attribute name: The name of the attribute. + :attribute inherited: Whether or not that attribute has been inherited from + a base class. + + Plus *all* arguments of `attr.ib` (except for ``factory`` + which is only syntactic sugar for ``default=Factory(...)``. + + .. versionadded:: 20.1.0 *inherited* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.2.0 *inherited* is not taken into account for + equality checks and hashing anymore. + + For the full version history of the fields, see `attr.ib`. 
+ """ + + __slots__ = ( + "name", + "default", + "validator", + "repr", + "eq", + "order", + "hash", + "init", + "metadata", + "type", + "converter", + "kw_only", + "inherited", + "on_setattr", + ) + + def __init__( + self, + name, + default, + validator, + repr, + cmp, # XXX: unused, remove along with other cmp code. + hash, + init, + inherited, + metadata=None, + type=None, + converter=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, + ): + eq, order = _determine_eq_order(cmp, eq, order, True) + + # Cache this descriptor here to speed things up later. + bound_setattr = _obj_setattr.__get__(self, Attribute) + + # Despite the big red warning, people *do* instantiate `Attribute` + # themselves. + bound_setattr("name", name) + bound_setattr("default", default) + bound_setattr("validator", validator) + bound_setattr("repr", repr) + bound_setattr("eq", eq) + bound_setattr("order", order) + bound_setattr("hash", hash) + bound_setattr("init", init) + bound_setattr("converter", converter) + bound_setattr( + "metadata", + ( + metadata_proxy(metadata) + if metadata + else _empty_metadata_singleton + ), + ) + bound_setattr("type", type) + bound_setattr("kw_only", kw_only) + bound_setattr("inherited", inherited) + bound_setattr("on_setattr", on_setattr) + + def __setattr__(self, name, value): + raise FrozenInstanceError() + + @classmethod + def from_counting_attr(cls, name, ca, type=None): + # type holds the annotated value. deal with conflicts: + if type is None: + type = ca.type + elif ca.type is not None: + raise ValueError( + "Type annotation and type argument cannot both be present" + ) + inst_dict = { + k: getattr(ca, k) + for k in Attribute.__slots__ + if k + not in ( + "name", + "validator", + "default", + "type", + "inherited", + ) # exclude methods and deprecated alias + } + return cls( + name=name, + validator=ca._validator, + default=ca._default, + type=type, + cmp=None, + inherited=False, + **inst_dict + ) + + @property + def cmp(self): + """ + Simulate the presence of a cmp attribute and warn. + """ + warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=2) + + return self.eq and self.order + + # Don't use attr.assoc since fields(Attribute) doesn't work + def _assoc(self, **changes): + """ + Copy *self* and apply *changes*. + """ + new = copy.copy(self) + + new._setattrs(changes.items()) + + return new + + # Don't use _add_pickle since fields(Attribute) doesn't work + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple( + getattr(self, name) if name != "metadata" else dict(self.metadata) + for name in self.__slots__ + ) + + def __setstate__(self, state): + """ + Play nice with pickle. 
+ """ + self._setattrs(zip(self.__slots__, state)) + + def _setattrs(self, name_values_pairs): + bound_setattr = _obj_setattr.__get__(self, Attribute) + for name, value in name_values_pairs: + if name != "metadata": + bound_setattr(name, value) + else: + bound_setattr( + name, + metadata_proxy(value) + if value + else _empty_metadata_singleton, + ) + + +_a = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=(name != "metadata"), + init=True, + inherited=False, + ) + for name in Attribute.__slots__ +] + +Attribute = _add_hash( + _add_eq( + _add_repr(Attribute, attrs=_a), + attrs=[a for a in _a if a.name != "inherited"], + ), + attrs=[a for a in _a if a.hash and a.name != "inherited"], +) + + +class _CountingAttr(object): + """ + Intermediate representation of attributes that uses a counter to preserve + the order in which the attributes have been defined. + + *Internal* data structure of the attrs library. Running into is most + likely the result of a bug like a forgotten `@attr.s` decorator. + """ + + __slots__ = ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + "metadata", + "_validator", + "converter", + "type", + "kw_only", + "on_setattr", + ) + __attrs_attrs__ = tuple( + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=True, + init=True, + kw_only=False, + eq=True, + order=False, + inherited=False, + on_setattr=None, + ) + for name in ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + "on_setattr", + ) + ) + ( + Attribute( + name="metadata", + default=None, + validator=None, + repr=True, + cmp=None, + hash=False, + init=True, + kw_only=False, + eq=True, + order=False, + inherited=False, + on_setattr=None, + ), + ) + cls_counter = 0 + + def __init__( + self, + default, + validator, + repr, + cmp, # XXX: unused, remove along with cmp + hash, + init, + converter, + metadata, + type, + kw_only, + eq, + order, + on_setattr, + ): + _CountingAttr.cls_counter += 1 + self.counter = _CountingAttr.cls_counter + self._default = default + self._validator = validator + self.converter = converter + self.repr = repr + self.eq = eq + self.order = order + self.hash = hash + self.init = init + self.metadata = metadata + self.type = type + self.kw_only = kw_only + self.on_setattr = on_setattr + + def validator(self, meth): + """ + Decorator that adds *meth* to the list of validators. + + Returns *meth* unchanged. + + .. versionadded:: 17.1.0 + """ + if self._validator is None: + self._validator = meth + else: + self._validator = and_(self._validator, meth) + return meth + + def default(self, meth): + """ + Decorator that allows to set the default for an attribute. + + Returns *meth* unchanged. + + :raises DefaultAlreadySetError: If default has been set before. + + .. versionadded:: 17.1.0 + """ + if self._default is not NOTHING: + raise DefaultAlreadySetError() + + self._default = Factory(meth, takes_self=True) + + return meth + + +_CountingAttr = _add_eq(_add_repr(_CountingAttr)) + + +@attrs(slots=True, init=False, hash=True) +class Factory(object): + """ + Stores a factory callable. + + If passed as the default value to `attr.ib`, the factory is used to + generate a new value. + + :param callable factory: A callable that takes either none or exactly one + mandatory positional argument depending on *takes_self*. + :param bool takes_self: Pass the partially initialized instance that is + being initialized as a positional argument. 
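+
+    A minimal sketch of both modes (the classes are hypothetical examples)::
+
+        >>> import attr
+        >>> @attr.s
+        ... class C(object):
+        ...     x = attr.ib(default=attr.Factory(list))
+        >>> C().x
+        []
+        >>> @attr.s
+        ... class D(object):
+        ...     x = attr.ib(default=1)
+        ...     y = attr.ib(default=attr.Factory(
+        ...         lambda self: self.x + 1, takes_self=True))
+        >>> D().y
+        2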
+ + .. versionadded:: 17.1.0 *takes_self* + """ + + factory = attrib() + takes_self = attrib() + + def __init__(self, factory, takes_self=False): + """ + `Factory` is part of the default machinery so if we want a default + value here, we have to implement it ourselves. + """ + self.factory = factory + self.takes_self = takes_self + + +def make_class(name, attrs, bases=(object,), **attributes_arguments): + """ + A quick way to create a new class called *name* with *attrs*. + + :param str name: The name for the new class. + + :param attrs: A list of names or a dictionary of mappings of names to + attributes. + + If *attrs* is a list or an ordered dict (`dict` on Python 3.6+, + `collections.OrderedDict` otherwise), the order is deduced from + the order of the names or attributes inside *attrs*. Otherwise the + order of the definition of the attributes is used. + :type attrs: `list` or `dict` + + :param tuple bases: Classes that the new class will subclass. + + :param attributes_arguments: Passed unmodified to `attr.s`. + + :return: A new class with *attrs*. + :rtype: type + + .. versionadded:: 17.1.0 *bases* + .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. + """ + if isinstance(attrs, dict): + cls_dict = attrs + elif isinstance(attrs, (list, tuple)): + cls_dict = dict((a, attrib()) for a in attrs) + else: + raise TypeError("attrs argument must be a dict or a list.") + + post_init = cls_dict.pop("__attrs_post_init__", None) + type_ = type( + name, + bases, + {} if post_init is None else {"__attrs_post_init__": post_init}, + ) + # For pickling to work, the __module__ variable needs to be set to the + # frame where the class is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + try: + type_.__module__ = sys._getframe(1).f_globals.get( + "__name__", "__main__" + ) + except (AttributeError, ValueError): + pass + + # We do it here for proper warnings with meaningful stacklevel. + cmp = attributes_arguments.pop("cmp", None) + ( + attributes_arguments["eq"], + attributes_arguments["order"], + ) = _determine_eq_order( + cmp, + attributes_arguments.get("eq"), + attributes_arguments.get("order"), + True, + ) + + return _attrs(these=cls_dict, **attributes_arguments)(type_) + + +# These are required by within this module so we define them here and merely +# import into .validators / .converters. + + +@attrs(slots=True, hash=True) +class _AndValidator(object): + """ + Compose many validators to a single one. + """ + + _validators = attrib() + + def __call__(self, inst, attr, value): + for v in self._validators: + v(inst, attr, value) + + +def and_(*validators): + """ + A validator that composes multiple validators into one. + + When called on a value, it runs all wrapped validators. + + :param callables validators: Arbitrary number of validators. + + .. versionadded:: 17.1.0 + """ + vals = [] + for validator in validators: + vals.extend( + validator._validators + if isinstance(validator, _AndValidator) + else [validator] + ) + + return _AndValidator(tuple(vals)) + + +def pipe(*converters): + """ + A converter that composes multiple converters into one. + + When called on a value, it runs all wrapped converters, returning the + *last* value. + + :param callables converters: Arbitrary number of converters. + + .. 
versionadded:: 20.1.0 + """ + + def pipe_converter(val): + for converter in converters: + val = converter(val) + + return val + + return pipe_converter diff --git a/venv/lib/python2.7/site-packages/attr/_next_gen.py b/venv/lib/python2.7/site-packages/attr/_next_gen.py new file mode 100644 index 0000000..b5ff60e --- /dev/null +++ b/venv/lib/python2.7/site-packages/attr/_next_gen.py @@ -0,0 +1,158 @@ +""" +This is a Python 3.6 and later-only, keyword-only, and **provisional** API that +calls `attr.s` with different default values. + +Provisional APIs that shall become "import attrs" one glorious day. +""" + +from functools import partial + +from attr.exceptions import UnannotatedAttributeError + +from . import setters +from ._make import NOTHING, _frozen_setattrs, attrib, attrs + + +def define( + maybe_cls=None, + *, + these=None, + repr=None, + hash=None, + init=None, + slots=True, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=None, + kw_only=False, + cache_hash=False, + auto_exc=True, + eq=None, + order=False, + auto_detect=True, + getstate_setstate=None, + on_setattr=None, +): + r""" + The only behavioral differences are the handling of the *auto_attribs* + option: + + :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves + exactly like `attr.s`. If left `None`, `attr.s` will try to guess: + + 1. If all attributes are annotated and no `attr.ib` is found, it assumes + *auto_attribs=True*. + 2. Otherwise it assumes *auto_attribs=False* and tries to collect + `attr.ib`\ s. + + and that mutable classes (``frozen=False``) validate on ``__setattr__``. + + .. versionadded:: 20.1.0 + """ + + def do_it(cls, auto_attribs): + return attrs( + maybe_cls=cls, + these=these, + repr=repr, + hash=hash, + init=init, + slots=slots, + frozen=frozen, + weakref_slot=weakref_slot, + str=str, + auto_attribs=auto_attribs, + kw_only=kw_only, + cache_hash=cache_hash, + auto_exc=auto_exc, + eq=eq, + order=order, + auto_detect=auto_detect, + collect_by_mro=True, + getstate_setstate=getstate_setstate, + on_setattr=on_setattr, + ) + + def wrap(cls): + """ + Making this a wrapper ensures this code runs during class creation. + + We also ensure that frozen-ness of classes is inherited. + """ + nonlocal frozen, on_setattr + + had_on_setattr = on_setattr not in (None, setters.NO_OP) + + # By default, mutable classes validate on setattr. + if frozen is False and on_setattr is None: + on_setattr = setters.validate + + # However, if we subclass a frozen class, we inherit the immutability + # and disable on_setattr. + for base_cls in cls.__bases__: + if base_cls.__setattr__ is _frozen_setattrs: + if had_on_setattr: + raise ValueError( + "Frozen classes can't use on_setattr " + "(frozen-ness was inherited)." + ) + + on_setattr = setters.NO_OP + break + + if auto_attribs is not None: + return do_it(cls, auto_attribs) + + try: + return do_it(cls, True) + except UnannotatedAttributeError: + return do_it(cls, False) + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. 
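+    # A short usage sketch (hypothetical classes, for illustration only):
+    #
+    #     @define                  # bare decorator: maybe_cls is the class
+    #     class Point: ...
+    #
+    #     @define(frozen=True)     # called form: maybe_cls is None,
+    #     class FrozenPoint: ...   # so we return ``wrap`` to be applied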
+ if maybe_cls is None: + return wrap + else: + return wrap(maybe_cls) + + +mutable = define +frozen = partial(define, frozen=True, on_setattr=None) + + +def field( + *, + default=NOTHING, + validator=None, + repr=True, + hash=None, + init=True, + metadata=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, +): + """ + Identical to `attr.ib`, except keyword-only and with some arguments + removed. + + .. versionadded:: 20.1.0 + """ + return attrib( + default=default, + validator=validator, + repr=repr, + hash=hash, + init=init, + metadata=metadata, + converter=converter, + factory=factory, + kw_only=kw_only, + eq=eq, + order=order, + on_setattr=on_setattr, + ) diff --git a/venv/lib/python2.7/site-packages/attr/_version_info.py b/venv/lib/python2.7/site-packages/attr/_version_info.py new file mode 100644 index 0000000..014e78a --- /dev/null +++ b/venv/lib/python2.7/site-packages/attr/_version_info.py @@ -0,0 +1,85 @@ +from __future__ import absolute_import, division, print_function + +from functools import total_ordering + +from ._funcs import astuple +from ._make import attrib, attrs + + +@total_ordering +@attrs(eq=False, order=False, slots=True, frozen=True) +class VersionInfo(object): + """ + A version object that can be compared to tuple of length 1--4: + + >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) + True + >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) + True + >>> vi = attr.VersionInfo(19, 2, 0, "final") + >>> vi < (19, 1, 1) + False + >>> vi < (19,) + False + >>> vi == (19, 2,) + True + >>> vi == (19, 2, 1) + False + + .. versionadded:: 19.2 + """ + + year = attrib(type=int) + minor = attrib(type=int) + micro = attrib(type=int) + releaselevel = attrib(type=str) + + @classmethod + def _from_version_string(cls, s): + """ + Parse *s* and return a _VersionInfo. + """ + v = s.split(".") + if len(v) == 3: + v.append("final") + + return cls( + year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] + ) + + def _ensure_tuple(self, other): + """ + Ensure *other* is a tuple of a valid length. + + Returns a possibly transformed *other* and ourselves as a tuple of + the same length as *other*. + """ + + if self.__class__ is other.__class__: + other = astuple(other) + + if not isinstance(other, tuple): + raise NotImplementedError + + if not (1 <= len(other) <= 4): + raise NotImplementedError + + return astuple(self)[: len(other)], other + + def __eq__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + return us == them + + def __lt__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't + # have to do anything special with releaselevel for now. + return us < them diff --git a/venv/lib/python2.7/site-packages/attr/_version_info.pyi b/venv/lib/python2.7/site-packages/attr/_version_info.pyi new file mode 100644 index 0000000..45ced08 --- /dev/null +++ b/venv/lib/python2.7/site-packages/attr/_version_info.pyi @@ -0,0 +1,9 @@ +class VersionInfo: + @property + def year(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def micro(self) -> int: ... + @property + def releaselevel(self) -> str: ... 
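A quick illustration of the provisional ``define``/``field`` API from
``_next_gen.py`` above (a sketch only: it requires Python 3.6+, and the
``Coordinates`` class is hypothetical, not part of this diff)::

    >>> import attr
    >>> @attr.define
    ... class Coordinates:
    ...     x: int = attr.field(default=0)
    ...     y: int = attr.field(default=0)
    >>> Coordinates(1, 2)
    Coordinates(x=1, y=2)
    >>> Coordinates(1, 2) == Coordinates(1, 2)
    True

Because all attributes are annotated and no ``attr.ib`` appears, ``define``
guesses ``auto_attribs=True``; being mutable, the class also validates on
``__setattr__`` by default.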
diff --git a/venv/lib/python2.7/site-packages/attr/converters.py b/venv/lib/python2.7/site-packages/attr/converters.py
new file mode 100644
index 0000000..715ce17
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/attr/converters.py
@@ -0,0 +1,85 @@
+"""
+Commonly useful converters.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from ._make import NOTHING, Factory, pipe
+
+
+__all__ = [
+    "pipe",
+    "optional",
+    "default_if_none",
+]
+
+
+def optional(converter):
+    """
+    A converter that allows an attribute to be optional. An optional attribute
+    is one which can be set to ``None``.
+
+    :param callable converter: the converter that is used for non-``None``
+        values.
+
+    .. versionadded:: 17.1.0
+    """
+
+    def optional_converter(val):
+        if val is None:
+            return None
+        return converter(val)
+
+    return optional_converter
+
+
+def default_if_none(default=NOTHING, factory=None):
+    """
+    A converter that allows to replace ``None`` values by *default* or the
+    result of *factory*.
+
+    :param default: Value to be used if ``None`` is passed. Passing an instance
+        of `attr.Factory` is supported, however the ``takes_self`` option
+        is *not*.
+    :param callable factory: A callable that takes no parameters whose result
+        is used if ``None`` is passed.
+
+    :raises TypeError: If **neither** *default* or *factory* is passed.
+    :raises TypeError: If **both** *default* and *factory* are passed.
+    :raises ValueError: If an instance of `attr.Factory` is passed with
+        ``takes_self=True``.
+
+    .. versionadded:: 18.2.0
+    """
+    if default is NOTHING and factory is None:
+        raise TypeError("Must pass either `default` or `factory`.")
+
+    if default is not NOTHING and factory is not None:
+        raise TypeError(
+            "Must pass either `default` or `factory` but not both."
+        )
+
+    if factory is not None:
+        default = Factory(factory)
+
+    if isinstance(default, Factory):
+        if default.takes_self:
+            raise ValueError(
+                "`takes_self` is not supported by default_if_none."
+            )
+
+        def default_if_none_converter(val):
+            if val is not None:
+                return val
+
+            return default.factory()
+
+    else:
+
+        def default_if_none_converter(val):
+            if val is not None:
+                return val
+
+            return default
+
+    return default_if_none_converter
diff --git a/venv/lib/python2.7/site-packages/attr/converters.pyi b/venv/lib/python2.7/site-packages/attr/converters.pyi
new file mode 100644
index 0000000..7b0caa1
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/attr/converters.pyi
@@ -0,0 +1,11 @@
+from typing import TypeVar, Optional, Callable, overload
+from . import _ConverterType
+
+_T = TypeVar("_T")
+
+def pipe(*validators: _ConverterType) -> _ConverterType: ...
+def optional(converter: _ConverterType) -> _ConverterType: ...
+@overload
+def default_if_none(default: _T) -> _ConverterType: ...
+@overload
+def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
diff --git a/venv/lib/python2.7/site-packages/attr/exceptions.py b/venv/lib/python2.7/site-packages/attr/exceptions.py
new file mode 100644
index 0000000..fcd8910
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/attr/exceptions.py
@@ -0,0 +1,92 @@
+from __future__ import absolute_import, division, print_function
+
+
+class FrozenError(AttributeError):
+    """
+    A frozen/immutable instance or attribute have been attempted to be
+    modified.
+
+    It mirrors the behavior of ``namedtuples`` by using the same error message
+    and subclassing `AttributeError`.
+
+    ..
versionadded:: 20.1.0 + """ + + msg = "can't set attribute" + args = [msg] + + +class FrozenInstanceError(FrozenError): + """ + A frozen instance has been attempted to be modified. + + .. versionadded:: 16.1.0 + """ + + +class FrozenAttributeError(FrozenError): + """ + A frozen attribute has been attempted to be modified. + + .. versionadded:: 20.1.0 + """ + + +class AttrsAttributeNotFoundError(ValueError): + """ + An ``attrs`` function couldn't find an attribute that the user asked for. + + .. versionadded:: 16.2.0 + """ + + +class NotAnAttrsClassError(ValueError): + """ + A non-``attrs`` class has been passed into an ``attrs`` function. + + .. versionadded:: 16.2.0 + """ + + +class DefaultAlreadySetError(RuntimeError): + """ + A default has been set using ``attr.ib()`` and is attempted to be reset + using the decorator. + + .. versionadded:: 17.1.0 + """ + + +class UnannotatedAttributeError(RuntimeError): + """ + A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type + annotation. + + .. versionadded:: 17.3.0 + """ + + +class PythonTooOldError(RuntimeError): + """ + It was attempted to use an ``attrs`` feature that requires a newer Python + version. + + .. versionadded:: 18.2.0 + """ + + +class NotCallableError(TypeError): + """ + A ``attr.ib()`` requiring a callable has been set with a value + that is not callable. + + .. versionadded:: 19.2.0 + """ + + def __init__(self, msg, value): + super(TypeError, self).__init__(msg, value) + self.msg = msg + self.value = value + + def __str__(self): + return str(self.msg) diff --git a/venv/lib/python2.7/site-packages/attr/exceptions.pyi b/venv/lib/python2.7/site-packages/attr/exceptions.pyi new file mode 100644 index 0000000..f268011 --- /dev/null +++ b/venv/lib/python2.7/site-packages/attr/exceptions.pyi @@ -0,0 +1,17 @@ +from typing import Any + +class FrozenError(AttributeError): + msg: str = ... + +class FrozenInstanceError(FrozenError): ... +class FrozenAttributeError(FrozenError): ... +class AttrsAttributeNotFoundError(ValueError): ... +class NotAnAttrsClassError(ValueError): ... +class DefaultAlreadySetError(RuntimeError): ... +class UnannotatedAttributeError(RuntimeError): ... +class PythonTooOldError(RuntimeError): ... + +class NotCallableError(TypeError): + msg: str = ... + value: Any = ... + def __init__(self, msg: str, value: Any) -> None: ... diff --git a/venv/lib/python2.7/site-packages/attr/filters.py b/venv/lib/python2.7/site-packages/attr/filters.py new file mode 100644 index 0000000..dc47e8f --- /dev/null +++ b/venv/lib/python2.7/site-packages/attr/filters.py @@ -0,0 +1,52 @@ +""" +Commonly useful filters for `attr.asdict`. +""" + +from __future__ import absolute_import, division, print_function + +from ._compat import isclass +from ._make import Attribute + + +def _split_what(what): + """ + Returns a tuple of `frozenset`s of classes and attributes. + """ + return ( + frozenset(cls for cls in what if isclass(cls)), + frozenset(cls for cls in what if isinstance(cls, Attribute)), + ) + + +def include(*what): + """ + Whitelist *what*. + + :param what: What to whitelist. + :type what: `list` of `type` or `attr.Attribute`\\ s + + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def include_(attribute, value): + return value.__class__ in cls or attribute in attrs + + return include_ + + +def exclude(*what): + """ + Blacklist *what*. + + :param what: What to blacklist. + :type what: `list` of classes or `attr.Attribute`\\ s. 
+ + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def exclude_(attribute, value): + return value.__class__ not in cls and attribute not in attrs + + return exclude_ diff --git a/venv/lib/python2.7/site-packages/attr/filters.pyi b/venv/lib/python2.7/site-packages/attr/filters.pyi new file mode 100644 index 0000000..68368fe --- /dev/null +++ b/venv/lib/python2.7/site-packages/attr/filters.pyi @@ -0,0 +1,5 @@ +from typing import Union, Any +from . import Attribute, _FilterType + +def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... +def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... diff --git a/venv/lib/python2.7/site-packages/attr/py.typed b/venv/lib/python2.7/site-packages/attr/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python2.7/site-packages/attr/setters.py b/venv/lib/python2.7/site-packages/attr/setters.py new file mode 100644 index 0000000..240014b --- /dev/null +++ b/venv/lib/python2.7/site-packages/attr/setters.py @@ -0,0 +1,77 @@ +""" +Commonly used hooks for on_setattr. +""" + +from __future__ import absolute_import, division, print_function + +from . import _config +from .exceptions import FrozenAttributeError + + +def pipe(*setters): + """ + Run all *setters* and return the return value of the last one. + + .. versionadded:: 20.1.0 + """ + + def wrapped_pipe(instance, attrib, new_value): + rv = new_value + + for setter in setters: + rv = setter(instance, attrib, rv) + + return rv + + return wrapped_pipe + + +def frozen(_, __, ___): + """ + Prevent an attribute to be modified. + + .. versionadded:: 20.1.0 + """ + raise FrozenAttributeError() + + +def validate(instance, attrib, new_value): + """ + Run *attrib*'s validator on *new_value* if it has one. + + .. versionadded:: 20.1.0 + """ + if _config._run_validators is False: + return new_value + + v = attrib.validator + if not v: + return new_value + + v(instance, attrib, new_value) + + return new_value + + +def convert(instance, attrib, new_value): + """ + Run *attrib*'s converter -- if it has one -- on *new_value* and return the + result. + + .. versionadded:: 20.1.0 + """ + c = attrib.converter + if c: + return c(new_value) + + return new_value + + +NO_OP = object() +""" +Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. + +Does not work in `pipe` or within lists. + +.. versionadded:: 20.1.0 +""" diff --git a/venv/lib/python2.7/site-packages/attr/setters.pyi b/venv/lib/python2.7/site-packages/attr/setters.pyi new file mode 100644 index 0000000..19bc33f --- /dev/null +++ b/venv/lib/python2.7/site-packages/attr/setters.pyi @@ -0,0 +1,18 @@ +from . import _OnSetAttrType, Attribute +from typing import TypeVar, Any, NewType, NoReturn, cast + +_T = TypeVar("_T") + +def frozen( + instance: Any, attribute: Attribute, new_value: Any +) -> NoReturn: ... +def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ... +def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ... + +# convert is allowed to return Any, because they can be chained using pipe. +def convert( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> Any: ... + +_NoOpType = NewType("_NoOpType", object) +NO_OP: _NoOpType diff --git a/venv/lib/python2.7/site-packages/attr/validators.py b/venv/lib/python2.7/site-packages/attr/validators.py new file mode 100644 index 0000000..b9a7305 --- /dev/null +++ b/venv/lib/python2.7/site-packages/attr/validators.py @@ -0,0 +1,379 @@ +""" +Commonly useful validators. 
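+
+A short usage sketch (``C`` is a hypothetical class; ``instance_of``
+raises ``TypeError`` for values of the wrong type)::
+
+    >>> import attr
+    >>> @attr.s
+    ... class C(object):
+    ...     x = attr.ib(validator=attr.validators.instance_of(int))
+    >>> C(42)
+    C(x=42)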
+""" + +from __future__ import absolute_import, division, print_function + +import re + +from ._make import _AndValidator, and_, attrib, attrs +from .exceptions import NotCallableError + + +__all__ = [ + "and_", + "deep_iterable", + "deep_mapping", + "in_", + "instance_of", + "is_callable", + "matches_re", + "optional", + "provides", +] + + +@attrs(repr=False, slots=True, hash=True) +class _InstanceOfValidator(object): + type = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not isinstance(value, self.type): + raise TypeError( + "'{name}' must be {type!r} (got {value!r} that is a " + "{actual!r}).".format( + name=attr.name, + type=self.type, + actual=value.__class__, + value=value, + ), + attr, + self.type, + value, + ) + + def __repr__(self): + return "".format( + type=self.type + ) + + +def instance_of(type): + """ + A validator that raises a `TypeError` if the initializer is called + with a wrong type for this particular attribute (checks are performed using + `isinstance` therefore it's also valid to pass a tuple of types). + + :param type: The type to check for. + :type type: type or tuple of types + + :raises TypeError: With a human readable error message, the attribute + (of type `attr.Attribute`), the expected type, and the value it + got. + """ + return _InstanceOfValidator(type) + + +@attrs(repr=False, frozen=True, slots=True) +class _MatchesReValidator(object): + regex = attrib() + flags = attrib() + match_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.match_func(value): + raise ValueError( + "'{name}' must match regex {regex!r}" + " ({value!r} doesn't)".format( + name=attr.name, regex=self.regex.pattern, value=value + ), + attr, + self.regex, + value, + ) + + def __repr__(self): + return "".format( + regex=self.regex + ) + + +def matches_re(regex, flags=0, func=None): + r""" + A validator that raises `ValueError` if the initializer is called + with a string that doesn't match *regex*. + + :param str regex: a regex string to match against + :param int flags: flags that will be passed to the underlying re function + (default 0) + :param callable func: which underlying `re` function to call (options + are `re.fullmatch`, `re.search`, `re.match`, default + is ``None`` which means either `re.fullmatch` or an emulation of + it on Python 2). For performance reasons, they won't be used directly + but on a pre-`re.compile`\ ed pattern. + + .. versionadded:: 19.2.0 + """ + fullmatch = getattr(re, "fullmatch", None) + valid_funcs = (fullmatch, None, re.search, re.match) + if func not in valid_funcs: + raise ValueError( + "'func' must be one of %s." + % ( + ", ".join( + sorted( + e and e.__name__ or "None" for e in set(valid_funcs) + ) + ), + ) + ) + + pattern = re.compile(regex, flags) + if func is re.match: + match_func = pattern.match + elif func is re.search: + match_func = pattern.search + else: + if fullmatch: + match_func = pattern.fullmatch + else: + pattern = re.compile(r"(?:{})\Z".format(regex), flags) + match_func = pattern.match + + return _MatchesReValidator(pattern, flags, match_func) + + +@attrs(repr=False, slots=True, hash=True) +class _ProvidesValidator(object): + interface = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. 
+        """
+        if not self.interface.providedBy(value):
+            raise TypeError(
+                "'{name}' must provide {interface!r} which {value!r} "
+                "doesn't.".format(
+                    name=attr.name, interface=self.interface, value=value
+                ),
+                attr,
+                self.interface,
+                value,
+            )
+
+    def __repr__(self):
+        return "<provides validator for interface {interface!r}>".format(
+            interface=self.interface
+        )
+
+
+def provides(interface):
+    """
+    A validator that raises a `TypeError` if the initializer is called
+    with an object that does not provide the requested *interface* (checks are
+    performed using ``interface.providedBy(value)`` (see `zope.interface
+    <https://zopeinterface.readthedocs.io/en/latest/>`_).
+
+    :param interface: The interface to check for.
+    :type interface: ``zope.interface.Interface``
+
+    :raises TypeError: With a human readable error message, the attribute
+        (of type `attr.Attribute`), the expected interface, and the
+        value it got.
+    """
+    return _ProvidesValidator(interface)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _OptionalValidator(object):
+    validator = attrib()
+
+    def __call__(self, inst, attr, value):
+        if value is None:
+            return
+
+        self.validator(inst, attr, value)
+
+    def __repr__(self):
+        return "<optional validator for {what} or None>".format(
+            what=repr(self.validator)
+        )
+
+
+def optional(validator):
+    """
+    A validator that makes an attribute optional. An optional attribute is one
+    which can be set to ``None`` in addition to satisfying the requirements of
+    the sub-validator.
+
+    :param validator: A validator (or a list of validators) that is used for
+        non-``None`` values.
+    :type validator: callable or `list` of callables.
+
+    .. versionadded:: 15.1.0
+    .. versionchanged:: 17.1.0 *validator* can be a list of validators.
+    """
+    if isinstance(validator, list):
+        return _OptionalValidator(_AndValidator(validator))
+    return _OptionalValidator(validator)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InValidator(object):
+    options = attrib()
+
+    def __call__(self, inst, attr, value):
+        try:
+            in_options = value in self.options
+        except TypeError:  # e.g. `1 in "abc"`
+            in_options = False
+
+        if not in_options:
+            raise ValueError(
+                "'{name}' must be in {options!r} (got {value!r})".format(
+                    name=attr.name, options=self.options, value=value
+                )
+            )
+
+    def __repr__(self):
+        return "<in_ validator with options {options!r}>".format(
+            options=self.options
+        )
+
+
+def in_(options):
+    """
+    A validator that raises a `ValueError` if the initializer is called
+    with a value that does not belong in the options provided. The check is
+    performed using ``value in options``.
+
+    :param options: Allowed options.
+    :type options: list, tuple, `enum.Enum`, ...
+
+    :raises ValueError: With a human readable error message, the attribute (of
+        type `attr.Attribute`), the expected options, and the value it
+        got.
+
+    .. versionadded:: 17.1.0
+    """
+    return _InValidator(options)
+
+
+@attrs(repr=False, slots=False, hash=True)
+class _IsCallableValidator(object):
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not callable(value):
+            message = (
+                "'{name}' must be callable "
+                "(got {value!r} that is a {actual!r})."
+            )
+            raise NotCallableError(
+                msg=message.format(
+                    name=attr.name, value=value, actual=value.__class__
+                ),
+                value=value,
+            )
+
+    def __repr__(self):
+        return "<is_callable validator>"
+
+
+def is_callable():
+    """
+    A validator that raises a `attr.exceptions.NotCallableError` if the
+    initializer is called with a value for this particular attribute
+    that is not callable.
+
+    .. versionadded:: 19.1.0
+
+    :raises `attr.exceptions.NotCallableError`: With a human readable error
+        message containing the attribute (`attr.Attribute`) name,
+        and the value it got.
+    """
+    return _IsCallableValidator()
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepIterable(object):
+    member_validator = attrib(validator=is_callable())
+    iterable_validator = attrib(
+        default=None, validator=optional(is_callable())
+    )
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if self.iterable_validator is not None:
+            self.iterable_validator(inst, attr, value)
+
+        for member in value:
+            self.member_validator(inst, attr, member)
+
+    def __repr__(self):
+        iterable_identifier = (
+            ""
+            if self.iterable_validator is None
+            else " {iterable!r}".format(iterable=self.iterable_validator)
+        )
+        return (
+            "<deep_iterable validator for{iterable_identifier}"
+            " iterables of {member!r}>"
+        ).format(
+            iterable_identifier=iterable_identifier,
+            member=self.member_validator,
+        )
+
+
+def deep_iterable(member_validator, iterable_validator=None):
+    """
+    A validator that performs deep validation of an iterable.
+
+    :param member_validator: Validator to apply to iterable members
+    :param iterable_validator: Validator to apply to iterable itself
+        (optional)
+
+    .. versionadded:: 19.1.0
+
+    :raises TypeError: if any sub-validators fail
+    """
+    return _DeepIterable(member_validator, iterable_validator)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepMapping(object):
+    key_validator = attrib(validator=is_callable())
+    value_validator = attrib(validator=is_callable())
+    mapping_validator = attrib(default=None, validator=optional(is_callable()))
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if self.mapping_validator is not None:
+            self.mapping_validator(inst, attr, value)
+
+        for key in value:
+            self.key_validator(inst, attr, key)
+            self.value_validator(inst, attr, value[key])
+
+    def __repr__(self):
+        return (
+            "<deep_mapping validator for objects mapping {key!r} to {value!r}>"
+        ).format(key=self.key_validator, value=self.value_validator)
+
+
+def deep_mapping(key_validator, value_validator, mapping_validator=None):
+    """
+    A validator that performs deep validation of a dictionary.
+
+    :param key_validator: Validator to apply to dictionary keys
+    :param value_validator: Validator to apply to dictionary values
+    :param mapping_validator: Validator to apply to top-level mapping
+        attribute (optional)
+
+    .. versionadded:: 19.1.0
+
+    :raises TypeError: if any sub-validators fail
+    """
+    return _DeepMapping(key_validator, value_validator, mapping_validator)
diff --git a/venv/lib/python2.7/site-packages/attr/validators.pyi b/venv/lib/python2.7/site-packages/attr/validators.pyi
new file mode 100644
index 0000000..9a22abb
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/attr/validators.pyi
@@ -0,0 +1,66 @@
+from typing import (
+    Container,
+    List,
+    Union,
+    TypeVar,
+    Type,
+    Any,
+    Optional,
+    Tuple,
+    Iterable,
+    Mapping,
+    Callable,
+    Match,
+    AnyStr,
+    overload,
+)
+from . import _ValidatorType
+
+_T = TypeVar("_T")
+_T1 = TypeVar("_T1")
+_T2 = TypeVar("_T2")
+_T3 = TypeVar("_T3")
+_I = TypeVar("_I", bound=Iterable)
+_K = TypeVar("_K")
+_V = TypeVar("_V")
+_M = TypeVar("_M", bound=Mapping)
+
+# To be more precise on instance_of use some overloads.
+# If there are more than 3 items in the tuple then we fall back to Any
+@overload
+def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ...
+@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2]] +) -> _ValidatorType[Union[_T1, _T2]]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2], Type[_T3]] +) -> _ValidatorType[Union[_T1, _T2, _T3]]: ... +@overload +def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ... +def provides(interface: Any) -> _ValidatorType[Any]: ... +def optional( + validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]] +) -> _ValidatorType[Optional[_T]]: ... +def in_(options: Container[_T]) -> _ValidatorType[_T]: ... +def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... +def matches_re( + regex: AnyStr, + flags: int = ..., + func: Optional[ + Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]] + ] = ..., +) -> _ValidatorType[AnyStr]: ... +def deep_iterable( + member_validator: _ValidatorType[_T], + iterable_validator: Optional[_ValidatorType[_I]] = ..., +) -> _ValidatorType[_I]: ... +def deep_mapping( + key_validator: _ValidatorType[_K], + value_validator: _ValidatorType[_V], + mapping_validator: Optional[_ValidatorType[_M]] = ..., +) -> _ValidatorType[_M]: ... +def is_callable() -> _ValidatorType[_T]: ... diff --git a/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/AUTHORS.rst b/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/AUTHORS.rst new file mode 100644 index 0000000..f14ef6c --- /dev/null +++ b/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/AUTHORS.rst @@ -0,0 +1,11 @@ +Credits +======= + +``attrs`` is written and maintained by `Hynek Schlawack `_. + +The development is kindly supported by `Variomedia AG `_. + +A full list of contributors can be found in `GitHub's overview `_. + +It’s the spiritual successor of `characteristic `_ and aspires to fix some of it clunkiness and unfortunate decisions. +Both were inspired by Twisted’s `FancyEqMixin `_ but both are implemented using class decorators because `subclassing is bad for you `_, m’kay? diff --git a/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/INSTALLER b/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/LICENSE b/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/LICENSE new file mode 100644 index 0000000..7ae3df9 --- /dev/null +++ b/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Hynek Schlawack + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/METADATA b/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/METADATA new file mode 100644 index 0000000..911bee4 --- /dev/null +++ b/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/METADATA @@ -0,0 +1,243 @@ +Metadata-Version: 2.1 +Name: attrs +Version: 20.2.0 +Summary: Classes Without Boilerplate +Home-page: https://www.attrs.org/ +Author: Hynek Schlawack +Author-email: hs@ox.cx +Maintainer: Hynek Schlawack +Maintainer-email: hs@ox.cx +License: MIT +Project-URL: Documentation, https://www.attrs.org/ +Project-URL: Bug Tracker, https://github.com/python-attrs/attrs/issues +Project-URL: Source Code, https://github.com/python-attrs/attrs +Keywords: class,attribute,boilerplate +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* +Description-Content-Type: text/x-rst +Provides-Extra: dev +Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'dev' +Requires-Dist: hypothesis ; extra == 'dev' +Requires-Dist: pympler ; extra == 'dev' +Requires-Dist: pytest (>=4.3.0) ; extra == 'dev' +Requires-Dist: six ; extra == 'dev' +Requires-Dist: zope.interface ; extra == 'dev' +Requires-Dist: sphinx ; extra == 'dev' +Requires-Dist: sphinx-rtd-theme ; extra == 'dev' +Requires-Dist: pre-commit ; extra == 'dev' +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: sphinx-rtd-theme ; extra == 'docs' +Requires-Dist: zope.interface ; extra == 'docs' +Provides-Extra: tests +Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'tests' +Requires-Dist: hypothesis ; extra == 'tests' +Requires-Dist: pympler ; extra == 'tests' +Requires-Dist: pytest (>=4.3.0) ; extra == 'tests' +Requires-Dist: six ; extra == 'tests' +Requires-Dist: zope.interface ; extra == 'tests' +Provides-Extra: tests_no_zope +Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'tests_no_zope' +Requires-Dist: hypothesis ; extra == 'tests_no_zope' +Requires-Dist: pympler ; extra == 'tests_no_zope' +Requires-Dist: pytest (>=4.3.0) ; extra == 'tests_no_zope' +Requires-Dist: six ; extra == 'tests_no_zope' + +.. image:: https://www.attrs.org/en/latest/_static/attrs_logo.png + :alt: attrs Logo + +====================================== +``attrs``: Classes Without Boilerplate +====================================== + +.. 
image:: https://readthedocs.org/projects/attrs/badge/?version=stable + :target: https://www.attrs.org/en/stable/?badge=stable + :alt: Documentation Status + +.. image:: https://github.com/python-attrs/attrs/workflows/CI/badge.svg?branch=master + :target: https://github.com/python-attrs/attrs/actions?workflow=CI + :alt: CI Status + +.. image:: https://codecov.io/github/python-attrs/attrs/branch/master/graph/badge.svg + :target: https://codecov.io/github/python-attrs/attrs + :alt: Test Coverage + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: black + +.. teaser-begin + +``attrs`` is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka `dunder `_ methods). + +Its main goal is to help you to write **concise** and **correct** software without slowing down your code. + +.. teaser-end + +For that, it gives you a class decorator and a way to declaratively define the attributes on that class: + +.. -code-begin- + +.. code-block:: pycon + + >>> import attr + + >>> @attr.s + ... class SomeClass(object): + ... a_number = attr.ib(default=42) + ... list_of_numbers = attr.ib(factory=list) + ... + ... def hard_math(self, another_number): + ... return self.a_number + sum(self.list_of_numbers) * another_number + + + >>> sc = SomeClass(1, [1, 2, 3]) + >>> sc + SomeClass(a_number=1, list_of_numbers=[1, 2, 3]) + + >>> sc.hard_math(3) + 19 + >>> sc == SomeClass(1, [1, 2, 3]) + True + >>> sc != SomeClass(2, [3, 2, 1]) + True + + >>> attr.asdict(sc) + {'a_number': 1, 'list_of_numbers': [1, 2, 3]} + + >>> SomeClass() + SomeClass(a_number=42, list_of_numbers=[]) + + >>> C = attr.make_class("C", ["a", "b"]) + >>> C("foo", "bar") + C(a='foo', b='bar') + + +After *declaring* your attributes ``attrs`` gives you: + +- a concise and explicit overview of the class's attributes, +- a nice human-readable ``__repr__``, +- a complete set of comparison methods (equality and ordering), +- an initializer, +- and much more, + +*without* writing dull boilerplate code again and again and *without* runtime performance penalties. + +On Python 3.6 and later, you can often even drop the calls to ``attr.ib()`` by using `type annotations `_. + +This gives you the power to use actual classes with actual types in your code instead of confusing ``tuple``\ s or `confusingly behaving `_ ``namedtuple``\ s. +Which in turn encourages you to write *small classes* that do `one thing well `_. +Never again violate the `single responsibility principle `_ just because implementing ``__init__`` et al is a painful drag. + + +.. -getting-help- + +Getting Help +============ + +Please use the ``python-attrs`` tag on `StackOverflow `_ to get help. + +Answering questions of your fellow developers is also great way to help the project! + + +.. -project-information- + +Project Information +=================== + +``attrs`` is released under the `MIT `_ license, +its documentation lives at `Read the Docs `_, +the code on `GitHub `_, +and the latest release on `PyPI `_. +It’s rigorously tested on Python 2.7, 3.5+, and PyPy. + +We collect information on **third-party extensions** in our `wiki `_. +Feel free to browse and add your own! + +If you'd like to contribute to ``attrs`` you're most welcome and we've written `a little guide `_ to get you started! + + +``attrs`` for Enterprise +------------------------ + +Available as part of the Tidelift Subscription. 
+ +The maintainers of ``attrs`` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications. +Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use. +`Learn more. `_ + + +Release Information +=================== + +20.2.0 (2020-09-05) +------------------- + +Backward-incompatible Changes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- ``attr.define()``, ``attr.frozen()``, ``attr.mutable()``, and ``attr.field()`` remain **provisional**. + + This release fixes a bunch of bugs and ergonomics but they remain mostly unchanged. + + If you wish to use them together with mypy, you can simply drop `this plugin `_ into your project. + + Feel free to provide feedback to them in the linked issue #668. + + We will release the ``attrs`` namespace once we have the feeling that the APIs have properly settled. + `#668 `_ + + +Changes +^^^^^^^ + +- ``attr.define()`` et al now correct detect ``__eq__`` and ``__ne__``. + `#671 `_ +- ``attr.define()`` et al's hybrid behavior now also works correctly when arguments are passed. + `#675 `_ +- It's possible to define custom ``__setattr__`` methods on slotted classes again. + `#681 `_ +- In 20.1.0 we introduced the ``inherited`` attribute on the ``attr.Attribute`` class to differentiate attributes that have been inherited and those that have been defined directly on the class. + + It has shown to be problematic to involve that attribute when comparing instances of ``attr.Attribute`` though, because when sub-classing, attributes from base classes are suddenly not equal to themselves in a super class. + + Therefore the ``inherited`` attribute will now be ignored when hashing and comparing instances of ``attr.Attribute``. + `#684 `_ +- ``zope.interface`` is now a "soft dependency" when running the test suite; if ``zope.interface`` is not installed when running the test suite, the interface-related tests will be automatically skipped. + `#685 `_ +- The ergonomics of creating frozen classes using ``@define(frozen=True)`` and sub-classing frozen classes has been improved: + you don't have to set ``on_setattr=None`` anymore. + `#687 `_ + +`Full changelog `_. + +Credits +======= + +``attrs`` is written and maintained by `Hynek Schlawack `_. + +The development is kindly supported by `Variomedia AG `_. + +A full list of contributors can be found in `GitHub's overview `_. + +It’s the spiritual successor of `characteristic `_ and aspires to fix some of it clunkiness and unfortunate decisions. +Both were inspired by Twisted’s `FancyEqMixin `_ but both are implemented using class decorators because `subclassing is bad for you `_, m’kay? 
+ + diff --git a/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/RECORD b/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/RECORD new file mode 100644 index 0000000..de29d4c --- /dev/null +++ b/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/RECORD @@ -0,0 +1,39 @@ +attr/__init__.py,sha256=gODfVynsRH0KilEZpKDZc-9uzw-gVKsAvMswrGKaLDg,1568 +attr/__init__.pyc,, +attr/__init__.pyi,sha256=NdphwugFzDCzbGu8W_AMaOVVYF_QXMeGltMuNDiTdho,12452 +attr/_compat.py,sha256=pHA95FXMosGIAb_qur4Jvg3R2hiX9DttK5rMc1eO9PA,7327 +attr/_compat.pyc,, +attr/_config.py,sha256=_KvW0mQdH2PYjHc0YfIUaV_o2pVfM7ziMEYTxwmEhOA,514 +attr/_config.pyc,, +attr/_funcs.py,sha256=sEk3Zl_ktu_fMxbEmFq9SR_2lsqnIQhQbyopgD6qEZw,11640 +attr/_funcs.pyc,, +attr/_make.py,sha256=ueNlGrc5rbCtPsXXoiu4Wfv1tV_SA2tFF_YXKaHceAk,84913 +attr/_make.pyc,, +attr/_next_gen.py,sha256=hB6oheeMQ5BZI8Z5NVzB9f4XibJIwDNgnUxp7bA_hjQ,4061 +attr/_version_info.py,sha256=azMi1lNelb3cJvvYUMXsXVbUANkRzbD5IEiaXVpeVr4,2162 +attr/_version_info.pyc,, +attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209 +attr/converters.py,sha256=CaK6iLtEMmemrqU8LQ1D2nWtbo9dGPAv4UaZ0rFzhOA,2214 +attr/converters.pyc,, +attr/converters.pyi,sha256=fVGSfawF3NMy2EBApkC7dAwMuujWCHnGEnnAgsbkVpg,380 +attr/exceptions.py,sha256=gmlET97ikqdQVvy7Ff9p7zVvqc2SsNtTd-r30pva1GE,1950 +attr/exceptions.pyc,, +attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539 +attr/filters.py,sha256=weDxwATsa69T_0bPVjiM1fGsciAMQmwhY5G8Jm5BxuI,1098 +attr/filters.pyc,, +attr/filters.pyi,sha256=xDpmKQlFdssgxGa5tsl1ADh_3zwAwAT4vUhd8h-8-Tk,214 +attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attr/setters.py,sha256=0ElzHwdVK3dsYcQi2CXkFvhx8fNxUI5OVhw8SWeaKmA,1434 +attr/setters.pyc,, +attr/setters.pyi,sha256=SYr6adhx4f0dSkmmBICg6eK8WMev5jT-KJQJTdul078,567 +attr/validators.py,sha256=6DBx1jt4oZxx1ppvx6JWqm9-UAsYpXC4HTwxJilCeRg,11497 +attr/validators.pyc,, +attr/validators.pyi,sha256=vZgsJqUwrJevh4v_Hd7_RSXqDrBctE6-3AEZ7uYKodo,1868 +attrs-20.2.0.dist-info/AUTHORS.rst,sha256=wsqCNbGz_mklcJrt54APIZHZpoTIJLkXqEhhn4Nd8hc,752 +attrs-20.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +attrs-20.2.0.dist-info/LICENSE,sha256=v2WaKLSSQGAvVrvfSQy-LsUJsVuY-Z17GaUsdA4yeGM,1082 +attrs-20.2.0.dist-info/METADATA,sha256=pu7GIXdRWvO27VbOWfrNM7xC9usuENinHvcts6IbXLc,10541 +attrs-20.2.0.dist-info/RECORD,, +attrs-20.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attrs-20.2.0.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110 +attrs-20.2.0.dist-info/top_level.txt,sha256=tlRYMddkRlKPqJ96wP2_j9uEsmcNHgD2SbuWd4CzGVU,5 diff --git a/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/REQUESTED b/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/WHEEL b/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/WHEEL new file mode 100644 index 0000000..6d38aa0 --- /dev/null +++ b/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/top_level.txt b/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/top_level.txt new file mode 100644 index 0000000..66a062d --- /dev/null +++ b/venv/lib/python2.7/site-packages/attrs-20.2.0.dist-info/top_level.txt @@ -0,0 +1 
@@ +attr diff --git a/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/INSTALLER b/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/LICENSE b/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/LICENSE new file mode 100644 index 0000000..5e795a6 --- /dev/null +++ b/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/LICENSE @@ -0,0 +1,7 @@ +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/METADATA b/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/METADATA new file mode 100644 index 0000000..4fe0e6b --- /dev/null +++ b/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/METADATA @@ -0,0 +1,72 @@ +Metadata-Version: 2.1 +Name: backports.functools-lru-cache +Version: 1.6.1 +Summary: Backport of functools.lru_cache +Home-page: https://github.com/jaraco/backports.functools_lru_cache +Author: Raymond Hettinger +Author-email: raymond.hettinger@gmail.com +Maintainer: Jason R. Coombs +Maintainer-email: jaraco@jaraco.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Requires-Python: >=2.6 +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=3.2) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (!=3.7.3,>=3.5) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=1.2.3) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-black-multipy ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/backports.functools_lru_cache.svg + :target: https://pypi.org/project/backports.functools_lru_cache + +.. image:: https://img.shields.io/pypi/pyversions/backports.functools_lru_cache.svg + +.. 
image:: https://img.shields.io/travis/jaraco/backports.functools_lru_cache/master.svg
+   :target: https://travis-ci.org/jaraco/backports.functools_lru_cache
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+   :target: https://github.com/psf/black
+   :alt: Code style: Black
+
+.. .. image:: https://img.shields.io/appveyor/ci/jaraco/skeleton/master.svg
+..    :target: https://ci.appveyor.com/project/jaraco/skeleton/branch/master
+
+.. image:: https://readthedocs.org/projects/backportsfunctools_lru_cache/badge/?version=latest
+   :target: https://backportsfunctools_lru_cache.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://tidelift.com/badges/package/pypi/backports.functools_lru_cache
+   :target: https://tidelift.com/subscription/pkg/pypi-backports.functools_lru_cache?utm_source=pypi-backports.functools_lru_cache&utm_medium=readme
+
+Backport of functools.lru_cache from Python 3.3 as published at `ActiveState
+<http://code.activestate.com/recipes/578078/>`_.
+
+Usage
+=====
+
+Consider using this technique for importing the 'lru_cache' function::
+
+    try:
+        from functools import lru_cache
+    except ImportError:
+        from backports.functools_lru_cache import lru_cache
+
+
+Security Contact
+================
+
+To report a security vulnerability, please use the
+`Tidelift security contact <https://tidelift.com/security>`_.
+Tidelift will coordinate the fix and disclosure.
+
+
diff --git a/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/RECORD b/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/RECORD
new file mode 100644
index 0000000..601643a
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/RECORD
@@ -0,0 +1,11 @@
+backports.functools_lru_cache-1.6.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+backports.functools_lru_cache-1.6.1.dist-info/LICENSE,sha256=pV4v_ptEmY5iHVHYwJS-0JrMS1I27nPX3zlaM7o8GP0,1050
+backports.functools_lru_cache-1.6.1.dist-info/METADATA,sha256=8utsfvJGCw6e9HcwGinBSm06j3h9mnP2Orfl6vhdgs8,2786
+backports.functools_lru_cache-1.6.1.dist-info/RECORD,,
+backports.functools_lru_cache-1.6.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+backports.functools_lru_cache-1.6.1.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110
+backports.functools_lru_cache-1.6.1.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10
+backports/__init__.py,sha256=jv2YF__bseklT3OWEzlqJ5qE24c4aWd5F4r0TTjOrWQ,65
+backports/__init__.pyc,,
+backports/functools_lru_cache.py,sha256=Vd8mKVtsiiCGrvSjNgYt7zySiuPRSl5cOHQuJ2qsAq4,7220
+backports/functools_lru_cache.pyc,,
diff --git a/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/REQUESTED b/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/WHEEL b/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/WHEEL
new file mode 100644
index 0000000..8b701e9
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.33.6)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/top_level.txt b/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/top_level.txt
new
file mode 100644 index 0000000..99d2be5 --- /dev/null +++ b/venv/lib/python2.7/site-packages/backports.functools_lru_cache-1.6.1.dist-info/top_level.txt @@ -0,0 +1 @@ +backports diff --git a/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/DESCRIPTION.rst b/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..933ff8c --- /dev/null +++ b/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,29 @@ +backports.shutil_get_terminal_size +================================== + +A backport of the `get_terminal_size`_ function from Python 3.3's shutil. + +Unlike the original version it is written in pure Python rather than C, +so it might be a tiny bit slower. + +.. _get_terminal_size: https://docs.python.org/3/library/shutil.html#shutil.get_terminal_size + + +Example usage +------------- + + >>> from backports.shutil_get_terminal_size import get_terminal_size + >>> get_terminal_size() + terminal_size(columns=105, lines=33) + + + +History +======= + +1.0.0 (2014-08-19) +------------------ + +First release. + + diff --git a/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/INSTALLER b/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/METADATA b/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/METADATA new file mode 100644 index 0000000..6a1a0e9 --- /dev/null +++ b/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/METADATA @@ -0,0 +1,44 @@ +Metadata-Version: 2.0 +Name: backports.shutil-get-terminal-size +Version: 1.0.0 +Summary: A backport of the get_terminal_size function from Python 3.3's shutil. +Home-page: https://github.com/chrippa/backports.shutil_get_terminal_size +Author: Christopher Rosell +Author-email: chrippa@tanuki.se +License: MIT +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.2 + +backports.shutil_get_terminal_size +================================== + +A backport of the `get_terminal_size`_ function from Python 3.3's shutil. + +Unlike the original version it is written in pure Python rather than C, +so it might be a tiny bit slower. + +.. _get_terminal_size: https://docs.python.org/3/library/shutil.html#shutil.get_terminal_size + + +Example usage +------------- + + >>> from backports.shutil_get_terminal_size import get_terminal_size + >>> get_terminal_size() + terminal_size(columns=105, lines=33) + + + +History +======= + +1.0.0 (2014-08-19) +------------------ + +First release. 
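+
+When the size cannot be queried, an explicit fallback can be supplied (a
+sketch; the ``fallback`` keyword mirrors the Python 3 signature)::
+
+    >>> from backports.shutil_get_terminal_size import get_terminal_size
+    >>> get_terminal_size(fallback=(80, 24))
+    terminal_size(columns=80, lines=24)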
+ + diff --git a/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/RECORD b/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/RECORD new file mode 100644 index 0000000..e6faedf --- /dev/null +++ b/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/RECORD @@ -0,0 +1,17 @@ +backports.shutil_get_terminal_size-1.0.0.dist-info/DESCRIPTION.rst,sha256=3i13kve8ULuCY1NGj7VKk089AZrjfYYRcCnZQlmiJBQ,596 +backports.shutil_get_terminal_size-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +backports.shutil_get_terminal_size-1.0.0.dist-info/METADATA,sha256=ODIV2b0snUagXjM5Rn9q9IognuuFf33nYkHm7Zk84lM,1175 +backports.shutil_get_terminal_size-1.0.0.dist-info/RECORD,, +backports.shutil_get_terminal_size-1.0.0.dist-info/WHEEL,sha256=6lxp_S3wZGmTBtGMVmNNLyvKFcp7HqQw2Wn4YYk-Suo,110 +backports.shutil_get_terminal_size-1.0.0.dist-info/metadata.json,sha256=TnbY_dmobLtSp7PRV0zAnwKTR_pdi3YlFS-u4rnkiZQ,701 +backports.shutil_get_terminal_size-1.0.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10 +backports/__init__.py,sha256=QDJyS5jtJnGFsSuOx43ZvLBCOrHImm8NrZk5f9URWdk,75 +backports/__init__.pyc,, +backports/shutil_get_terminal_size/__init__.py,sha256=sPUduFmFhru_q1gLcv5gMN3I3tUSSSU9YlvuVdhQjdo,338 +backports/shutil_get_terminal_size/__init__.pyc,, +backports/shutil_get_terminal_size/get_terminal_size.py,sha256=nany8-cgBwmqLe7giIJ0e7c2PV7dTp89Q7fGsxUrNQI,2913 +backports/shutil_get_terminal_size/get_terminal_size.pyc,, +shutil_backports/__init__.py,sha256=-4-2xF-gVmfYKY7LR63wlkIxAZmQAhNEEm-L1-oa748,226 +shutil_backports/__init__.pyc,, +shutil_backports/get_terminal_size.py,sha256=OLqTFt7sotXe48w1ET9P-E4T8XlnjFFA_ZiBF7Wcayg,2867 +shutil_backports/get_terminal_size.pyc,, diff --git a/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/WHEEL b/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/WHEEL new file mode 100644 index 0000000..f19235c --- /dev/null +++ b/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.23.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/metadata.json b/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/metadata.json new file mode 100644 index 0000000..230e781 --- /dev/null +++ b/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"summary": "A backport of the get_terminal_size function from Python 3.3's shutil.", "name": "backports.shutil-get-terminal-size", "license": "MIT", "contacts": [{"role": "author", "name": "Christopher Rosell", "email": "chrippa@tanuki.se"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/chrippa/backports.shutil_get_terminal_size"}, "generator": "bdist_wheel (0.23.0)", "classifiers": ["Development Status :: 5 - Production/Stable", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.2"], "version": "1.0.0", "metadata_version": "2.0"} \ No newline at end of file diff --git a/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/top_level.txt 
b/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/top_level.txt
new file mode 100644
index 0000000..99d2be5
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/backports.shutil_get_terminal_size-1.0.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+backports
diff --git a/venv/lib/python2.7/site-packages/backports/__init__.py b/venv/lib/python2.7/site-packages/backports/__init__.py
new file mode 100644
index 0000000..3ad9513
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/backports/__init__.py
@@ -0,0 +1,2 @@
+from pkgutil import extend_path
+__path__ = extend_path(__path__, __name__)
diff --git a/venv/lib/python2.7/site-packages/backports/configparser/__init__.py b/venv/lib/python2.7/site-packages/backports/configparser/__init__.py
new file mode 100644
index 0000000..603d604
--- /dev/null
+++ b/venv/lib/python2.7/site-packages/backports/configparser/__init__.py
@@ -0,0 +1,1473 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# flake8: noqa
+
+"""Configuration file parser.
+
+A configuration file consists of sections, led by a "[section]" header,
+and followed by "name: value" entries, with continuations and such in
+the style of RFC 822.
+
+Intrinsic defaults can be specified by passing them into the
+ConfigParser constructor as a dictionary.
+
+class:
+
+ConfigParser -- responsible for parsing a list of
+                configuration files, and managing the parsed database.
+
+    methods:
+
+    __init__(defaults=None, dict_type=_default_dict, allow_no_value=False,
+             delimiters=('=', ':'), comment_prefixes=('#', ';'),
+             inline_comment_prefixes=None, strict=True,
+             empty_lines_in_values=True, default_section='DEFAULT',
+             interpolation=<unset>, converters=<unset>):
+        Create the parser. When `defaults' is given, it is initialized into the
+        dictionary or intrinsic defaults. The keys must be strings, the values
+        must be appropriate for %()s string interpolation.
+
+        When `dict_type' is given, it will be used to create the dictionary
+        objects for the list of sections, for the options within a section, and
+        for the default values.
+
+        When `delimiters' is given, it will be used as the set of substrings
+        that divide keys from values.
+
+        When `comment_prefixes' is given, it will be used as the set of
+        substrings that prefix comments in empty lines. Comments can be
+        indented.
+
+        When `inline_comment_prefixes' is given, it will be used as the set of
+        substrings that prefix comments in non-empty lines.
+
+        When `strict` is True, the parser won't allow for any section or option
+        duplicates while reading from a single source (file, string or
+        dictionary). Default is True.
+
+        When `empty_lines_in_values' is False (default: True), each empty line
+        marks the end of an option. Otherwise, internal empty lines of
+        a multiline option are kept as part of the value.
+
+        When `allow_no_value' is True (default: False), options without
+        values are accepted; the value presented for these is None.
+
+        When `default_section' is given, the name of the special section is
+        named accordingly. By default it is called ``"DEFAULT"`` but this can
+        be customized to point to any other valid section name. Its current
+        value can be retrieved using the ``parser_instance.default_section``
+        attribute and may be modified at runtime.
+
+        When `interpolation` is given, it should be an Interpolation subclass
+        instance. It will be used as the handler for option value
+        pre-processing when using getters.
+        RawConfigParser objects don't do any sort of interpolation,
+        whereas ConfigParser uses an instance of BasicInterpolation.
+        The library also provides a ``zc.buildout`` inspired
+        ExtendedInterpolation implementation.
+
+        When `converters` is given, it should be a dictionary where each key
+        represents the name of a type converter and each value is a callable
+        implementing the conversion from string to the desired datatype. Every
+        converter gets its corresponding get*() method on the parser object and
+        section proxies.
+
+    sections()
+        Return all the configuration section names, sans DEFAULT.
+
+    has_section(section)
+        Return whether the given section exists.
+
+    has_option(section, option)
+        Return whether the given option exists in the given section.
+
+    options(section)
+        Return list of configuration options for the named section.
+
+    read(filenames, encoding=None)
+        Read and parse the iterable of named configuration files, given by
+        name. A single filename is also allowed. Non-existing files
+        are ignored. Return list of successfully read files.
+
+    read_file(f, filename=None)
+        Read and parse one configuration file, given as a file object.
+        The filename defaults to f.name; it is only used in error
+        messages (if f has no `name' attribute, the string `<???>' is used).
+
+    read_string(string)
+        Read configuration from a given string.
+
+    read_dict(dictionary)
+        Read configuration from a dictionary. Keys are section names,
+        values are dictionaries with keys and values that should be present
+        in the section. If the used dictionary type preserves order, sections
+        and their keys will be added in order. Values are automatically
+        converted to strings.
+
+    get(section, option, raw=False, vars=None, fallback=_UNSET)
+        Return a string value for the named option. All % interpolations are
+        expanded in the return values, based on the defaults passed into the
+        constructor and the DEFAULT section. Additional substitutions may be
+        provided using the `vars' argument, which must be a dictionary whose
+        contents override any pre-existing defaults. If `option' is a key in
+        `vars', the value from `vars' is used.
+
+    getint(section, option, raw=False, vars=None, fallback=_UNSET)
+        Like get(), but convert value to an integer.
+
+    getfloat(section, option, raw=False, vars=None, fallback=_UNSET)
+        Like get(), but convert value to a float.
+
+    getboolean(section, option, raw=False, vars=None, fallback=_UNSET)
+        Like get(), but convert value to a boolean (currently case
+        insensitively defined as 0, false, no, off for False, and 1, true,
+        yes, on for True). Returns False or True.
+
+    items(section=_UNSET, raw=False, vars=None)
+        If section is given, return a list of tuples with (name, value) for
+        each option in the section. Otherwise, return a list of tuples with
+        (section_name, section_proxy) for each section, including DEFAULTSECT.
+
+    remove_section(section)
+        Remove the given file section and all its options.
+
+    remove_option(section, option)
+        Remove the given option from the given section.
+
+    set(section, option, value)
+        Set the given option.
+
+    write(fp, space_around_delimiters=True)
+        Write the configuration state in .ini format. If
+        `space_around_delimiters' is True (the default), delimiters
+        between keys and values are surrounded by spaces.
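+
+Example (a minimal sketch; the section and option names below are
+illustrative):
+
+    parser = ConfigParser()
+    parser.read_string(u"[server]\nport = 8080\ndebug = yes\n")
+    parser.get("server", "port")          # -> '8080'
+    parser.getint("server", "port")       # -> 8080
+    parser.getboolean("server", "debug")  # -> True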
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+try:
+    from collections.abc import MutableMapping
+except ImportError:
+    from collections import MutableMapping
+import functools
+import io
+import itertools
+import os
+import re
+import sys
+import warnings
+
+from backports.configparser.helpers import OrderedDict as _default_dict
+from backports.configparser.helpers import ChainMap as _ChainMap
+from backports.configparser.helpers import from_none, open, str, PY2
+from backports.configparser.helpers import PathLike, fspath
+from backports.configparser.helpers import MutableMapping
+
+__all__ = [
+    "NoSectionError",
+    "DuplicateOptionError",
+    "DuplicateSectionError",
+    "NoOptionError",
+    "InterpolationError",
+    "InterpolationDepthError",
+    "InterpolationMissingOptionError",
+    "InterpolationSyntaxError",
+    "ParsingError",
+    "MissingSectionHeaderError",
+    "ConfigParser",
+    "SafeConfigParser",
+    "RawConfigParser",
+    "Interpolation",
+    "BasicInterpolation",
+    "ExtendedInterpolation",
+    "LegacyInterpolation",
+    "SectionProxy",
+    "ConverterMapping",
+    "DEFAULTSECT",
+    "MAX_INTERPOLATION_DEPTH",
+]
+
+DEFAULTSECT = "DEFAULT"
+
+MAX_INTERPOLATION_DEPTH = 10
+
+
+# exception classes
+class Error(Exception):
+    """Base class for ConfigParser exceptions."""
+
+    def __init__(self, msg=''):
+        self.message = msg
+        Exception.__init__(self, msg)
+
+    def __repr__(self):
+        return self.message
+
+    __str__ = __repr__
+
+
+class NoSectionError(Error):
+    """Raised when no section matches a requested option."""
+
+    def __init__(self, section):
+        Error.__init__(self, 'No section: %r' % (section,))
+        self.section = section
+        self.args = (section,)
+
+
+class DuplicateSectionError(Error):
+    """Raised when a section is repeated in an input source.
+
+    Possible repetitions that raise this exception are: multiple creation
+    using the API or in strict parsers when a section is found more than once
+    in a single input file, string or dictionary.
+    """
+
+    def __init__(self, section, source=None, lineno=None):
+        msg = [repr(section), " already exists"]
+        if source is not None:
+            message = ["While reading from ", repr(source)]
+            if lineno is not None:
+                message.append(" [line {0:2d}]".format(lineno))
+            message.append(": section ")
+            message.extend(msg)
+            msg = message
+        else:
+            msg.insert(0, "Section ")
+        Error.__init__(self, "".join(msg))
+        self.section = section
+        self.source = source
+        self.lineno = lineno
+        self.args = (section, source, lineno)
+
+
+class DuplicateOptionError(Error):
+    """Raised by strict parsers when an option is repeated in an input source.
+
+    Current implementation raises this exception only when an option is found
+    more than once in a single file, string or dictionary.
+    """
+
+    def __init__(self, section, option, source=None, lineno=None):
+        msg = [repr(option), " in section ", repr(section), " already exists"]
+        if source is not None:
+            message = ["While reading from ", repr(source)]
+            if lineno is not None:
+                message.append(" [line {0:2d}]".format(lineno))
+            message.append(": option ")
+            message.extend(msg)
+            msg = message
+        else:
+            msg.insert(0, "Option ")
+        Error.__init__(self, "".join(msg))
+        self.section = section
+        self.option = option
+        self.source = source
+        self.lineno = lineno
+        self.args = (section, option, source, lineno)
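+
+
+# A minimal sketch of when the two duplicate errors above are raised (the
+# section/option names are illustrative); strict=True is the default:
+#
+#     parser = ConfigParser(strict=True)
+#     parser.read_string(u"[a]\nx = 1\nx = 2\n")  # raises DuplicateOptionError
+#     parser.read_string(u"[b]\n[b]\n")           # raises DuplicateSectionError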
+class NoOptionError(Error):
+    """A requested option was not found."""
+
+    def __init__(self, option, section):
+        Error.__init__(self, "No option %r in section: %r" % (option, section))
+        self.option = option
+        self.section = section
+        self.args = (option, section)
+
+
+class InterpolationError(Error):
+    """Base class for interpolation-related exceptions."""
+
+    def __init__(self, option, section, msg):
+        Error.__init__(self, msg)
+        self.option = option
+        self.section = section
+        self.args = (option, section, msg)
+
+
+class InterpolationMissingOptionError(InterpolationError):
+    """A string substitution required a setting which was not available."""
+
+    def __init__(self, option, section, rawval, reference):
+        msg = (
+            "Bad value substitution: option {0!r} in section {1!r} contains "
+            "an interpolation key {2!r} which is not a valid option name. "
+            "Raw value: {3!r}".format(option, section, reference, rawval)
+        )
+        InterpolationError.__init__(self, option, section, msg)
+        self.reference = reference
+        self.args = (option, section, rawval, reference)
+
+
+class InterpolationSyntaxError(InterpolationError):
+    """Raised when the source text contains invalid syntax.
+
+    Current implementation raises this exception when the source text into
+    which substitutions are made does not conform to the required syntax.
+    """
+
+
+class InterpolationDepthError(InterpolationError):
+    """Raised when substitutions are nested too deeply."""
+
+    def __init__(self, option, section, rawval):
+        msg = (
+            "Recursion limit exceeded in value substitution: option {0!r} "
+            "in section {1!r} contains an interpolation key which "
+            "cannot be substituted in {2} steps. Raw value: {3!r}"
+            "".format(option, section, MAX_INTERPOLATION_DEPTH, rawval)
+        )
+        InterpolationError.__init__(self, option, section, msg)
+        self.args = (option, section, rawval)
+
+
+class ParsingError(Error):
+    """Raised when a configuration file does not follow legal syntax."""
+
+    def __init__(self, source=None, filename=None):
+        # Exactly one of `source'/`filename' arguments has to be given.
+        # `filename' kept for compatibility.
+        if filename and source:
+            raise ValueError(
+                "Cannot specify both `filename' and `source'. " "Use `source'."
+            )
+        elif not filename and not source:
+            raise ValueError("Required argument `source' not given.")
+        elif filename:
+            source = filename
+        Error.__init__(self, 'Source contains parsing errors: %r' % source)
+        self.source = source
+        self.errors = []
+        self.args = (source,)
+
+    @property
+    def filename(self):
+        """Deprecated, use `source'."""
+        warnings.warn(
+            "The 'filename' attribute will be removed in future versions. "
+            "Use 'source' instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return self.source
+
+    @filename.setter
+    def filename(self, value):
+        """Deprecated, use `source'."""
+        warnings.warn(
+            "The 'filename' attribute will be removed in future versions. "
+            "Use 'source' instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        self.source = value
+
+    def append(self, lineno, line):
+        self.errors.append((lineno, line))
+        self.message += '\n\t[line %2d]: %s' % (lineno, line)
+
+
+class MissingSectionHeaderError(ParsingError):
+    """Raised when a key-value pair is found before any section header."""
+
+    def __init__(self, filename, lineno, line):
+        Error.__init__(
+            self,
+            'File contains no section headers.\nfile: %r, line: %d\n%r'
+            % (filename, lineno, line),
+        )
+        self.source = filename
+        self.lineno = lineno
+        self.line = line
+        self.args = (filename, lineno, line)
+
+
+# Used in parser getters to indicate that the default behaviour when a
+# specific option is not found is to raise an exception. Created to enable
+# `None' as a valid fallback value.
+_UNSET = object()
+
+
+class Interpolation(object):
+    """Dummy interpolation that passes the value through with no changes."""
+
+    def before_get(self, parser, section, option, value, defaults):
+        return value
+
+    def before_set(self, parser, section, option, value):
+        return value
+
+    def before_read(self, parser, section, option, value):
+        return value
+
+    def before_write(self, parser, section, option, value):
+        return value
+
+
+class BasicInterpolation(Interpolation):
+    """Interpolation as implemented in the classic ConfigParser.
+
+    The option values can contain format strings which refer to other values in
+    the same section, or values in the special default section.
+
+    For example:
+
+        something: %(dir)s/whatever
+
+    would resolve the "%(dir)s" to the value of dir. All reference
+    expansions are done late, on demand. If a user needs to use a bare % in
+    a configuration file, she can escape it by writing %%. Other % usage
+    is considered a user error and raises `InterpolationSyntaxError'."""
+
+    _KEYCRE = re.compile(r"%\(([^)]+)\)s")
+
+    def before_get(self, parser, section, option, value, defaults):
+        L = []
+        self._interpolate_some(parser, option, L, value, section, defaults, 1)
+        return ''.join(L)
+
+    def before_set(self, parser, section, option, value):
+        tmp_value = value.replace('%%', '')  # escaped percent signs
+        tmp_value = self._KEYCRE.sub('', tmp_value)  # valid syntax
+        if '%' in tmp_value:
+            raise ValueError(
+                "invalid interpolation syntax in %r at "
+                "position %d" % (value, tmp_value.find('%'))
+            )
+        return value
+
+    def _interpolate_some(self, parser, option, accum, rest, section, map, depth):
+        rawval = parser.get(section, option, raw=True, fallback=rest)
+        if depth > MAX_INTERPOLATION_DEPTH:
+            raise InterpolationDepthError(option, section, rawval)
+        while rest:
+            p = rest.find("%")
+            if p < 0:
+                accum.append(rest)
+                return
+            if p > 0:
+                accum.append(rest[:p])
+                rest = rest[p:]
+            # p is no longer used
+            c = rest[1:2]
+            if c == "%":
+                accum.append("%")
+                rest = rest[2:]
+            elif c == "(":
+                m = self._KEYCRE.match(rest)
+                if m is None:
+                    raise InterpolationSyntaxError(
+                        option,
+                        section,
+                        "bad interpolation variable reference %r" % rest,
+                    )
+                var = parser.optionxform(m.group(1))
+                rest = rest[m.end() :]
+                try:
+                    v = map[var]
+                except KeyError:
+                    raise from_none(
+                        InterpolationMissingOptionError(option, section, rawval, var)
+                    )
+                if "%" in v:
+                    self._interpolate_some(
+                        parser, option, accum, v, section, map, depth + 1
+                    )
+                else:
+                    accum.append(v)
+            else:
+                raise InterpolationSyntaxError(
+                    option,
+                    section,
+                    "'%%' must be followed by '%%' or '(', " "found: %r" % (rest,),
+                )
+class ExtendedInterpolation(Interpolation):
+    """Advanced variant of interpolation, supports the syntax used by
+    `zc.buildout'. Enables interpolation between sections."""
+
+    _KEYCRE = re.compile(r"\$\{([^}]+)\}")
+
+    def before_get(self, parser, section, option, value, defaults):
+        L = []
+        self._interpolate_some(parser, option, L, value, section, defaults, 1)
+        return ''.join(L)
+
+    def before_set(self, parser, section, option, value):
+        tmp_value = value.replace('$$', '')  # escaped dollar signs
+        tmp_value = self._KEYCRE.sub('', tmp_value)  # valid syntax
+        if '$' in tmp_value:
+            raise ValueError(
+                "invalid interpolation syntax in %r at "
+                "position %d" % (value, tmp_value.find('$'))
+            )
+        return value
+
+    def _interpolate_some(self, parser, option, accum, rest, section, map, depth):
+        rawval = parser.get(section, option, raw=True, fallback=rest)
+        if depth > MAX_INTERPOLATION_DEPTH:
+            raise InterpolationDepthError(option, section, rawval)
+        while rest:
+            p = rest.find("$")
+            if p < 0:
+                accum.append(rest)
+                return
+            if p > 0:
+                accum.append(rest[:p])
+                rest = rest[p:]
+            # p is no longer used
+            c = rest[1:2]
+            if c == "$":
+                accum.append("$")
+                rest = rest[2:]
+            elif c == "{":
+                m = self._KEYCRE.match(rest)
+                if m is None:
+                    raise InterpolationSyntaxError(
+                        option,
+                        section,
+                        "bad interpolation variable reference %r" % rest,
+                    )
+                path = m.group(1).split(':')
+                rest = rest[m.end() :]
+                sect = section
+                opt = option
+                try:
+                    if len(path) == 1:
+                        opt = parser.optionxform(path[0])
+                        v = map[opt]
+                    elif len(path) == 2:
+                        sect = path[0]
+                        opt = parser.optionxform(path[1])
+                        v = parser.get(sect, opt, raw=True)
+                    else:
+                        raise InterpolationSyntaxError(
+                            option, section, "More than one ':' found: %r" % (rest,)
+                        )
+                except (KeyError, NoSectionError, NoOptionError):
+                    raise from_none(
+                        InterpolationMissingOptionError(
+                            option, section, rawval, ":".join(path)
+                        )
+                    )
+                if "$" in v:
+                    self._interpolate_some(
+                        parser,
+                        opt,
+                        accum,
+                        v,
+                        sect,
+                        dict(parser.items(sect, raw=True)),
+                        depth + 1,
+                    )
+                else:
+                    accum.append(v)
+            else:
+                raise InterpolationSyntaxError(
+                    option,
+                    section,
+                    "'$' must be followed by '$' or '{', " "found: %r" % (rest,),
+                )
+
+
+class LegacyInterpolation(Interpolation):
+    """Deprecated interpolation used in old versions of ConfigParser.
+    Use BasicInterpolation or ExtendedInterpolation instead."""
+
+    _KEYCRE = re.compile(r"%\(([^)]*)\)s|.")
+
+    def before_get(self, parser, section, option, value, vars):
+        rawval = value
+        depth = MAX_INTERPOLATION_DEPTH
+        while depth:  # Loop through this until it's done
+            depth -= 1
+            if value and "%(" in value:
+                replace = functools.partial(self._interpolation_replace, parser=parser)
+                value = self._KEYCRE.sub(replace, value)
+                try:
+                    value = value % vars
+                except KeyError as e:
+                    raise from_none(
+                        InterpolationMissingOptionError(
+                            option, section, rawval, e.args[0]
+                        )
+                    )
+            else:
+                break
+        if value and "%(" in value:
+            raise InterpolationDepthError(option, section, rawval)
+        return value
+
+    def before_set(self, parser, section, option, value):
+        return value
+
+    @staticmethod
+    def _interpolation_replace(match, parser):
+        s = match.group(1)
+        if s is None:
+            return match.group()
+        else:
+            return "%%(%s)s" % parser.optionxform(s)
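+
+
+# A minimal sketch of the ${section:option} lookup implemented above (the
+# section and option names are illustrative):
+#
+#     parser = ConfigParser(interpolation=ExtendedInterpolation())
+#     parser.read_string(u"[common]\nroot = /srv\n"
+#                        u"[app]\nlog = ${common:root}/log\n")
+#     parser.get("app", "log")  # -> '/srv/log'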
+class RawConfigParser(MutableMapping):
+    """ConfigParser that does not do interpolation."""
+
+    # Regular expressions for parsing section headers and options
+    _SECT_TMPL = r"""
+        \[                                 # [
+        (?P<header>[^]]+)                  # very permissive!
+        \]                                 # ]
+        """
+    _OPT_TMPL = r"""
+        (?P